def delete(self, request, dataset_name, project_name):
    try:
        pr = NDProject.fromName(project_name)
        pr.delete()
        return HttpResponse(status=204)
    except Exception as e:
        return HttpResponseBadRequest()
def buildStack(token, channel_name, resolution=None):
    """Wrapper for the different datatypes"""
    pr = NDProject.fromTokenName(token)
    ch = pr.getChannelObj(channel_name)
    try:
        # if ch.channel_type in ANNOTATION_CHANNELS and pr.kvengine == MYSQL:
        #   clearStack(pr, ch, resolution)

        # build zslice and isotropic stack
        buildImageStack(pr, ch, resolution)
        # if zslice stack then build neariso as well
        if pr.datasetcfg.scalingoption == ZSLICES:
            buildImageStack(pr, ch, resolution, neariso=True)
        # mark channel as propagated after it is done
        ch.propagate = PROPAGATED
    # the specific handler must precede the generic one, otherwise
    # "except Exception" swallows MySQLdb.Error and this branch is unreachable
    except MySQLdb.Error as e:
        # clearStack(pr, ch, resolution)
        ch.propagate = NOT_PROPAGATED
        logger.error("Error in building image stack {}".format(token))
        raise NDWSError("Error in building image stack {}".format(token))
    except Exception as e:
        # RB This is a thorny issue. anno propagate doesn't work when not PROPAGATED
        # mark it as not propagated if there is an error
        ch.propagate = NOT_PROPAGATED
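# Exception handlers run top to bottom, so the broad "except Exception" in
# buildStack must come after the specific MySQLdb.Error clause or it would
# swallow it. A self-contained sketch of that ordering rule:
def _example_handler_ordering():
    """Illustrative only: the specific except clause must be listed first."""

    class SpecificError(Exception):
        pass

    def handle(exc):
        try:
            raise exc
        except SpecificError:
            return "specific handler"
        except Exception:
            return "generic handler"

    # the specific clause wins only because it is listed before the generic one
    assert handle(SpecificError()) == "specific handler"
    assert handle(ValueError()) == "generic handler"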
def createProject(self):
    project_obj = NDProject.fromName(self.project_name)
    project = model_to_dict(project_obj.pr)
    project['kvengine'] = REDIS
    project['host'] = 'localhost'
    project['s3backend'] = S3_TRUE
    del project['user']
    del project['dataset']
    try:
        response = getJson('https://{}/resource/dataset/{}/project/{}/'.format(self.host, self.dataset_name, self.project_name))
        if response.status_code == 404:
            response = postJson('https://{}/resource/dataset/{}/project/{}/'.format(self.host, self.dataset_name, self.project_name), project)
            if response.status_code != 201:
                raise ValueError('The server returned status code {}'.format(response.status_code))
        elif (response.status_code == 200) and (self.project_name == response.json()['project_name']):
            self.logger.warning("Project already exists. Skipping Project creation")
        else:
            raise ValueError('The server returned status code {} and content {}'.format(response.status_code, response.json()))
    except Exception as e:
        self.logger.error(e)
        # exit non-zero so callers can tell the migration failed
        sys.exit(1)
def createChannel(self, channel_name):
    project = NDProject.fromName(self.project_name)
    channel_obj = project.getChannelObj(channel_name)
    channel = model_to_dict(channel_obj.ch)
    del channel['id']
    del channel['project']
    # del channel['user']
    try:
        response = getJson('https://{}/resource/dataset/{}/project/{}/channel/{}/'.format(self.host, self.dataset_name, self.project_name, channel_name))
        if response.status_code == 404:
            response = postJson('https://{}/resource/dataset/{}/project/{}/channel/{}/'.format(self.host, self.dataset_name, self.project_name, channel_name), channel)
            if response.status_code != 201:
                raise ValueError('The server returned status code {}'.format(response.status_code))
        elif (response.status_code == 200) and (channel_name == response.json()['channel_name']):
            self.logger.warning("Channel already exists. Skipping Channel creation")
        else:
            raise ValueError('The server returned status code {} and content {}'.format(response.status_code, response.json()))
    except Exception as e:
        self.logger.error(e)
        # exit non-zero so callers can tell the migration failed
        sys.exit(1)
def topkeys(webargs):
    """Return the most frequent keys in the database."""
    [token, channel, otherargs] = webargs.split('/', 2)
    # get the project
    proj = NDProject.fromTokenName(token)
    # get the channel
    ch = ndproj.NDChannel(proj, channel)
    # and the ramon database
    with closing(RamonDB(proj)) as rdb:
        m = re.search(r"topkeys/(\d+)/(?:type/(\d+)/)?", otherargs)
        # if we have a count clause use it, otherwise default to 10
        if m:
            count = int(m.group(1))
            anntype = int(m.group(2)) if m.group(2) else None
        else:
            count = 10
            anntype = None
        topkeys = rdb.getTopKeys(ch, count, anntype)
        return json.dumps(topkeys)
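# For reference, topkeys expects webargs of the form
# token/channel/topkeys/<count>/[type/<anntype>/]. An illustrative parse of
# such a path (the token and channel names are invented):
def _example_topkeys_webargs():
    """Illustrative only: decompose a topkeys request path."""
    import re

    webargs = 'mytoken/mychannel/topkeys/20/type/2/'
    token, channel, otherargs = webargs.split('/', 2)
    m = re.search(r"topkeys/(\d+)/(?:type/(\d+)/)?", otherargs)
    count = int(m.group(1))                             # 20
    anntype = int(m.group(2)) if m.group(2) else None   # 2
    return token, channel, count, anntype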
def get(self, request, dataset_name, project_name):
    try:
        pr = NDProject.fromName(project_name)
        return HttpResponse(pr.serialize(), content_type='application/json')
    except Project.DoesNotExist as e:
        return HttpResponseNotFound()
    except Exception as e:
        return HttpResponseBadRequest()
def post(self, request, dataset_name, project_name):
    try:
        pr = NDProject.fromJson(dataset_name, request.body)
        if request.user.is_authenticated():
            pr.user_id = request.user.id
        else:
            pr.user_id = User.objects.get(username='******').id
        pr.create()
        return HttpResponse(status=201)
    except Exception as e:
        return HttpResponseBadRequest()
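# A hypothetical POST body for the view above, for illustration only. The
# field names mirror the project model used elsewhere in this section; the
# values are invented, and NDProject.fromJson defines what is actually
# required:
#
#   {"project_name": "example_project",
#    "project_description": "illustrative only",
#    "host": "localhost",
#    "kvengine": "Redis",
#    "kvserver": "localhost",
#    "s3backend": 0,
#    "nd_version": "1.0"}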
def getProject(self):
    try:
        response = getJson('https://{}/resource/dataset/{}/project/{}/'.format(self.host, self.dataset_name, self.project_name))
        project_json = response.json()
        del project_json['user']
        del project_json['dataset']
        return NDProject.fromJson(self.dataset_name, json.dumps(project_json))
    except Exception as e:
        self.logger.error(e)
        raise e
def getAnnotation(webargs):
    """Fetch a RAMON object as HDF5 by object identifier"""
    [token, channel, otherargs] = webargs.split('/', 2)
    # get the project
    proj = NDProject.fromTokenName(token)
    ch = ndproj.NDChannel(proj, channel)
    # and the ramon database
    with closing(RamonDB(proj)) as rdb:
        try:
            option_args = otherargs.split('/')
            annoid = int(option_args[0])
            annobj = getAnnoDictById(ch, annoid, proj, rdb)
            if 'boundingbox' in option_args:
                m = re.search(r"boundingbox/([\d]+)/", otherargs)
                resolution = int(m.groups()[0]) if m else ch.resolution
                with closing(SpatialDB(proj)) as db:
                    bbcorner, bbdim = db.getBoundingBox(ch, [annoid], resolution)
                annobj[annoid]['bbcorner'] = bbcorner
                annobj[annoid]['bbdim'] = bbdim
            jsonstr = json.dumps(annobj)
        except Exception as e:
            logger.error("JSON get ID {}. Error {}. Webargs {}.".format(option_args[0], e, webargs))
            raise NDWSError("JSON Get ID {}. Error {}.".format(option_args[0], e))
    return jsonstr
def query(webargs):
    """Return a list of IDs that match a key=value"""
    [token, channel, otherargs] = webargs.split('/', 2)
    # get the project
    proj = NDProject.fromTokenName(token)
    # get the channel
    ch = NDChannel.fromName(proj, channel)
    # and the ramon database
    with closing(RamonDB(proj)) as rdb:
        m = re.search(r"query/([\w]+)/([\w]+)", otherargs)
        if m:
            qrykey = m.group(1)
            qryvalue = m.group(2)
        else:
            logger.error("Invalid key/value query format")
            raise NDWSError("Invalid key/value query format")
        ids = rdb.getKVQuery(ch, qrykey, qryvalue)
        return json.dumps(ids.tolist())
def deleteTestDB(project_name, token_name='unittest'):
    try:
        # get the objects
        tk = NDToken.fromName(token_name)
        tk.delete()
        pr = NDProject.fromName(project_name)
        ds = pr.datasetcfg
        # tk = Token.objects.get(token_name=token_name)
        # pr = Project.objects.get(project_name=project_name)
        # ds = Dataset.objects.get(dataset_name=pr.dataset_id)
        # get the channel list
        # channel_list = Channel.objects.filter(project_id=pr)
        # get the correct object for the kvengine
        # pd = NDProjectsDB.getProjDB(pr)
        for ch in pr.projectChannels():
            ch.delete()
            # delete the channel table
            # pd.deleteNDChannel(ch.channel_name)
        # delete the project database
        # pd.deleteNDProject()
        # delete the objects
        pr.delete()
        ds.delete()
        # delete s3 bucket and dynamo table
        if KV_ENGINE == REDIS:
            CuboidIndexDB.deleteTable()
            CuboidBucket.deleteBucket()
    except Exception as e:
        print(e)
        raise e
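# createTestDB (defined later in this section) and deleteTestDB pair naturally
# as unittest fixtures. A minimal sketch, assuming both helpers and NDProject
# are importable in the test module; the class and test names are hypothetical:
import unittest

class _ExampleProjectRoundTrip(unittest.TestCase):
    """Illustrative only: the intended setUp/tearDown pairing."""

    def setUp(self):
        # build the unit-test dataset, project, token, and channel
        createTestDB('unittest_project', channel_list=['unit_anno'])

    def tearDown(self):
        # tear everything down so the next run starts clean
        deleteTestDB('unittest_project')

    def test_project_exists(self):
        pr = NDProject.fromName('unittest_project')
        self.assertEqual(pr.project_name, 'unittest_project')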
def synaptogram_view(request, webargs):
    """Render a synaptogram as a Web page"""
    try:
        m = re.match(r"(?P<token>[\d\w]+)/(?P<channels>[\d\w,]+)/(xy|xz|yz)/(?P<resolution>[\d]+)/(?P<xlow>[\d]+),(?P<xhigh>[\d]+)/(?P<ylow>[\d]+),(?P<yhigh>[\d]+)/(?P<zlow>[\d]+),(?P<zhigh>[\d]+)/", webargs)
        md = m.groupdict()
        token = md['token']
        chanstr = md['channels']
        resolution = int(md['resolution'])
        xlow = int(md['xlow'])
        xhigh = int(md['xhigh'])
        ylow = int(md['ylow'])
        yhigh = int(md['yhigh'])
        zlow = int(md['zlow'])
        zhigh = int(md['zhigh'])
        channels = chanstr.split(',')

        # get the project
        proj = NDProject.fromTokenName(token)

        # and the database and then call the db function
        with closing(SpatialDB(proj)) as db:
            # convert to cutout coordinates
            (xoffset, yoffset, zoffset) = proj.datasetcfg.get_offset(resolution)
            (xlow, xhigh) = (xlow - xoffset, xhigh - xoffset)
            (ylow, yhigh) = (ylow - yoffset, yhigh - yoffset)
            (zlow, zhigh) = (zlow - zoffset, zhigh - zoffset)
            corner = [xlow, ylow, zlow]
            dim = [xhigh - xlow, yhigh - ylow, zhigh - zlow]

            outputdict = {}
            # get the data region for each channel
            for chan in channels:
                # data type on a per channel basis
                ch = proj.getChannelObj(chan)
                try:
                    cb = db.cutout(ch, corner, dim, resolution)
                    # apply window for 16 bit projects
                    if ch.getDataType() in DTYPE_uint16:
                        [startwindow, endwindow] = window_range = ch.window_range
                        if endwindow != 0:
                            cb.data = np.uint8(windowCutout(cb.data, window_range))

                    outputdict[chan] = []
                    for zslice in cb.data:
                        if ch.getChannelType() in ANNOTATION_CHANNELS:
                            # parse annotation project
                            imagemap = np.zeros([dim[1], dim[0]], dtype=np.uint32)
                            imagemap = recolor_ctype(zslice, imagemap)
                            img = Image.frombuffer('RGBA', (dim[0], dim[1]), imagemap, 'raw', 'RGBA', 0, 1)
                        else:
                            # parse image project
                            img = Image.frombuffer('L', (dim[0], dim[1]), zslice.flatten(), 'raw', 'L', 0, 1)

                        # convert to base64
                        fileobj = cStringIO.StringIO()
                        img.save(fileobj, "PNG")
                        fileobj.seek(0)
                        encodedimg = base64.b64encode(fileobj.read())
                        outputdict[chan].append(encodedimg)

                    # outputdict[chan] = cb.data.tolist()
                    outputdict['{}.dtype'.format(chan)] = str(cb.data.dtype)
                except KeyError:
                    raise Exception("Channel %s not found" % chan)

            outputdict['shape'] = cb.data.shape

        # serialize once and return
        jsonstr = json.dumps(outputdict)
        return django.http.HttpResponse(jsonstr, content_type="application/json")
    except:
        raise
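# The webargs for synaptogram_view encode a channel list and a 3-D bounding
# box. A self-contained look at the expected path shape and the named groups
# it yields (token and channel names are invented):
def _example_synaptogram_webargs():
    """Illustrative only: parse a synaptogram request path."""
    import re

    pattern = (r"(?P<token>[\d\w]+)/(?P<channels>[\d\w,]+)/(xy|xz|yz)/(?P<resolution>[\d]+)/"
               r"(?P<xlow>[\d]+),(?P<xhigh>[\d]+)/(?P<ylow>[\d]+),(?P<yhigh>[\d]+)/"
               r"(?P<zlow>[\d]+),(?P<zhigh>[\d]+)/")
    webargs = 'mytoken/chan1,chan2/xy/0/1000,1512/2000,2512/50,55/'
    md = re.match(pattern, webargs).groupdict()
    # md['token'] == 'mytoken'; md['channels'].split(',') == ['chan1', 'chan2']
    # md['xlow'], md['xhigh'] == '1000', '1512'
    return md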
class MaxProjCatmaid:
    """Prefetch CATMAID tiles into memcache"""

    def __init__(self):
        self.proj = None
        self.db = None
        self.token = None
        self.tilesz = 512

    def __del__(self):
        pass

    def getTileXY(self, ch, res, xtile, ytile, zslice, width):
        """Cutout, return the image"""
        # figure out the cutout (limit to max image size)
        xstart = xtile * self.tilesz
        ystart = ytile * self.tilesz
        xend = min((xtile + 1) * self.tilesz, self.proj.datasetcfg.imageSize(res)[0][0])
        yend = min((ytile + 1) * self.tilesz, self.proj.datasetcfg.imageSize(res)[0][1])
        zstart = max(zslice - width, 0)
        zend = min(zslice + 1 + width, self.tilesz, self.proj.datasetcfg.imageSize(res)[0][2])

        # call the mcfc interface
        imageargs = '{}/{},{}/{},{}/{},{}/'.format(res, xstart, xend, ystart, yend, zstart, zend)
        cutout = ndwsrest.cutout(imageargs, ch, self.proj, self.db)

        # maximum-intensity projection over the z axis
        tiledata = np.amax(cutout.data, axis=0)
        tiledata = ndwsrest.window(tiledata, ch)

        # turn into an 8-bit image and return
        return Image.frombuffer('L', (tiledata.shape[1], tiledata.shape[0]), tiledata.flatten(), 'raw', 'L', 0, 1)

    def getTile(self, webargs):
        """Either fetch the file from memcache or get a mcfc image"""
        try:
            # arguments of format /token/channel/(?:width:3)/slice_type/z/x_y_res.png
            m = re.match(r"(\w+)/([\w+,[:\w]*]*)(?:/width:([\d+]+))?/(xy|yz|xz)/(\d+)/(\d+)_(\d+)_(\d+).png", webargs)
            [self.token, channel, widthstr, slice_type] = [i for i in m.groups()[:4]]
            [ztile, ytile, xtile, res] = [int(i) for i in m.groups()[4:]]
            # the width clause is optional; default to 0 extra slices when
            # absent (assumed default)
            width = int(widthstr) if widthstr is not None else 0
        except Exception as e:
            logger.error("Incorrect arguments for getTile {}. {}".format(webargs, e))
            raise NDWSError("Incorrect arguments for getTile {}. {}".format(webargs, e))

        self.proj = NDProject.fromTokenName(self.token)
        ch = self.proj.getChannelObj(channel)
        with closing(SpatialDB(self.proj)) as self.db:
            tile = None
            if tile is None:
                if slice_type == 'xy':
                    img = self.getTileXY(ch, res, xtile, ytile, ztile, width)
                # elif slice_type == 'xz':
                #   img = self.getTileXZ(res, xtile, ytile, ztile, width)
                # elif slice_type == 'yz':
                #   img = self.getTileYZ(res, xtile, ytile, ztile, width)
                else:
                    logger.error("Requested illegal image plane {}. Should be xy, xz, yz.".format(slice_type))
                    raise NDWSError("Requested illegal image plane {}. Should be xy, xz, yz.".format(slice_type))
                fobj = cStringIO.StringIO()
                img.save(fobj, "PNG")
            else:
                fobj = cStringIO.StringIO(tile)
        fobj.seek(0)
        return fobj
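# The heart of getTileXY above is a maximum-intensity projection: np.amax
# collapses the z axis so each output pixel is the brightest voxel in its
# column. A self-contained illustration with toy data:
def _example_max_projection():
    """Illustrative only: max projection over the z axis of a tiny stack."""
    import numpy as np

    # a 3-slice stack of 2x2 images, indexed (z, y, x)
    stack = np.array([[[1, 5], [0, 2]],
                      [[4, 1], [7, 2]],
                      [[2, 9], [3, 8]]], dtype=np.uint8)

    mip = np.amax(stack, axis=0)  # brightest voxel along z
    return mip                    # [[4 9]
                                  #  [7 8]]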
class SimpleCatmaid:
    """Prefetch CATMAID tiles into memcache"""

    def __init__(self):
        """Bind the memcache client"""
        self.proj = None
        self.channel = None
        self.tilesz = 512
        # make the memcache connection
        self.mc = pylibmc.Client(["127.0.0.1"], binary=True, behaviors={"tcp_nodelay": True, "ketama": True})

    def __del__(self):
        pass

    def buildKey(self, res, slice_type, xtile, ytile, ztile, timetile, filterlist):
        return 'simple/{}/{}/{}/{}/{}/{}/{}/{}/{}'.format(self.token, self.channel, slice_type, res, xtile, ytile, ztile, timetile, filterlist)

    def cacheMissXY(self, res, xtile, ytile, ztile, timetile, filterlist):
        """On a miss. Cutout, return the image and load the cache in a background thread"""
        # make sure that the tile size is aligned with the cubedim
        if self.tilesz % self.proj.datasetcfg.cubedim[res][0] != 0 or self.tilesz % self.proj.datasetcfg.cubedim[res][1] != 0:
            logger.error("Illegal tile size. Not aligned")
            raise NDWSError("Illegal tile size. Not aligned")

        # figure out the cutout (limit to max image size)
        xstart = xtile * self.tilesz
        ystart = ytile * self.tilesz
        xend = min((xtile + 1) * self.tilesz, self.proj.datasetcfg.get_imagesize(res)[0])
        yend = min((ytile + 1) * self.tilesz, self.proj.datasetcfg.get_imagesize(res)[1])

        # get an xy image slice
        if timetile is None:
            imageargs = '{}/{}/{}/{},{}/{},{}/{}/'.format(self.channel, 'xy', res, xstart, xend, ystart, yend, ztile)
        else:
            imageargs = '{}/{}/{}/{},{}/{},{}/{}/{}/'.format(self.channel, 'xy', res, xstart, xend, ystart, yend, ztile, timetile)
        # if filter list exists then add on for downstream processing
        if filterlist:
            imageargs = imageargs + 'filter/{}/'.format(filterlist)
        cb = ndwsrest.imgSlice(imageargs, self.proj, self.db)

        # pad partial tiles out to the full tile size
        if cb.data.shape != (1, self.tilesz, self.tilesz) and cb.data.shape != (1, 1, self.tilesz, self.tilesz):
            if timetile is None:
                tiledata = np.zeros((1, self.tilesz, self.tilesz), cb.data.dtype)
                tiledata[0, 0:((yend - 1) % self.tilesz + 1), 0:((xend - 1) % self.tilesz + 1)] = cb.data[0, :, :]
            else:
                tiledata = np.zeros((1, 1, self.tilesz, self.tilesz), cb.data.dtype)
                tiledata[0, 0, 0:((yend - 1) % self.tilesz + 1), 0:((xend - 1) % self.tilesz + 1)] = cb.data[0, 0, :, :]
            cb.data = tiledata

        return cb.xyImage()

    def cacheMissXZ(self, res, xtile, ytile, ztile, timetile, filterlist):
        """On a miss. Cutout, return the image and load the cache in a background thread"""
        # make sure that the tile size is aligned with the cubedim
        if self.tilesz % self.proj.datasetcfg.cubedim[res][0] != 0 or self.tilesz % self.proj.datasetcfg.get_cubedim(res)[2] != 0:
            logger.error("Illegal tile size. Not aligned")
            raise NDWSError("Illegal tile size. Not aligned")

        # figure out the cutout (limit to max image size)
        xstart = xtile * self.tilesz
        xend = min((xtile + 1) * self.tilesz, self.proj.datasetcfg.get_imagesize(res)[0])

        # OK this is weird but we have to choose a convention. xtile, ytile,
        # ztile refer to the URL request, so ztile is ydata, but xstart,
        # zstart, etc. refer to ndstore coordinates for the cutout.
        # z cutouts need to get rescaled. we'll map to the closest pixel range
        # and tolerate one pixel error at the boundary
        # scalefactor = zvoxel / yvoxel
        scalefactor = self.proj.datasetcfg.get_voxelres(res)[2] / self.proj.datasetcfg.get_voxelres(res)[1]
        zoffset = self.proj.datasetcfg.get_offset(res)[2]
        ztilestart = int((ytile * self.tilesz) / scalefactor) + zoffset
        zstart = max(ztilestart, zoffset)
        ztileend = int(math.ceil((ytile + 1) * self.tilesz / scalefactor)) + zoffset
        zend = min(ztileend, self.proj.datasetcfg.get_imagesize(res)[2] + 1)

        # get an xz image slice
        if timetile is None:
            imageargs = '{}/{}/{}/{},{}/{}/{},{}/'.format(self.channel, 'xz', res, xstart, xend, ztile, zstart, zend)
        else:
            imageargs = '{}/{}/{}/{},{}/{}/{},{}/{}/'.format(self.channel, 'xz', res, xstart, xend, ztile, zstart, zend, timetile)
        if filterlist:
            imageargs = imageargs + 'filter/{}/'.format(filterlist)
        cb = ndwsrest.imgSlice(imageargs, self.proj, self.db)

        # scale by the appropriate amount
        if cb.data.shape != (ztileend - ztilestart, 1, self.tilesz) and cb.data.shape != (1, ztileend - ztilestart, 1, self.tilesz):
            if timetile is None:
                tiledata = np.zeros((ztileend - ztilestart, 1, self.tilesz), cb.data.dtype)
                tiledata[0:zend - zstart, 0, 0:((xend - 1) % self.tilesz + 1)] = cb.data[:, 0, :]
            else:
                tiledata = np.zeros((1, ztileend - ztilestart, 1, self.tilesz), cb.data.dtype)
                tiledata[0, 0:zend - zstart, 0, 0:((xend - 1) % self.tilesz + 1)] = cb.data[0, :, 0, :]
            cb.data = tiledata

        return cb.xzImage(scalefactor)

    def cacheMissYZ(self, res, xtile, ytile, ztile, timetile, filterlist):
        """On a miss. Cutout, return the image and load the cache in a background thread"""
        # make sure that the tile size is aligned with the cubedim
        if self.tilesz % self.proj.datasetcfg.get_cubedim(res)[1] != 0 or self.tilesz % self.proj.datasetcfg.get_cubedim(res)[2] != 0:
            logger.error("Illegal tile size. Not aligned")
            raise NDWSError("Illegal tile size. Not aligned")

        # figure out the cutout (limit to max image size)
        ystart = ytile * self.tilesz
        yend = min((ytile + 1) * self.tilesz, self.proj.datasetcfg.get_imagesize(res)[1])

        # z cutouts need to get rescaled. we'll map to the closest pixel range
        # and tolerate one pixel error at the boundary
        # scalefactor = zvoxel / xvoxel
        scalefactor = self.proj.datasetcfg.get_voxelres(res)[2] / self.proj.datasetcfg.get_voxelres(res)[0]
        zoffset = self.proj.datasetcfg.get_offset(res)[2]
        ztilestart = int((ztile * self.tilesz) / scalefactor) + zoffset
        zstart = max(ztilestart, zoffset)
        ztileend = int(math.ceil((ztile + 1) * self.tilesz / scalefactor)) + zoffset
        zend = min(ztileend, self.proj.datasetcfg.get_imagesize(res)[2] + 1)

        # get a yz image slice
        if timetile is None:
            imageargs = '{}/{}/{}/{}/{},{}/{},{}/'.format(self.channel, 'yz', res, xtile, ystart, yend, zstart, zend)
        else:
            imageargs = '{}/{}/{}/{}/{},{}/{},{}/{}/'.format(self.channel, 'yz', res, xtile, ystart, yend, zstart, zend, timetile)
        if filterlist:
            imageargs = imageargs + 'filter/{}/'.format(filterlist)
        cb = ndwsrest.imgSlice(imageargs, self.proj, self.db)

        # scale by the appropriate amount
        if cb.data.shape != (ztileend - ztilestart, self.tilesz, 1) and cb.data.shape != (1, ztileend - ztilestart, self.tilesz, 1):
            if timetile is None:
                tiledata = np.zeros((ztileend - ztilestart, self.tilesz, 1), cb.data.dtype)
                tiledata[0:zend - zstart, 0:((yend - 1) % self.tilesz + 1), 0] = cb.data[:, :, 0]
            else:
                tiledata = np.zeros((1, ztileend - ztilestart, self.tilesz, 1), cb.data.dtype)
                tiledata[0, 0:zend - zstart, 0:((yend - 1) % self.tilesz + 1), 0] = cb.data[0, :, :, 0]
            cb.data = tiledata

        return cb.yzImage(scalefactor)

    def getTile(self, webargs):
        """Fetch the file from memcache or get a cutout from the database"""
        try:
            # argument of format token/channel/slice_type/(filter/filterlist/)?(timetile/)?z/y_x_res.png
            p = re.compile(r"(\w+)/([\w+,]*?)/(xy|yz|xz|)/(?:filter/([\d,]+)/)?(?:(\d+)/)?(\d+)/(\d+)_(\d+)_(\d+).png")
            m = p.match(webargs)
            [self.token, self.channel, slice_type, filterlist] = [i for i in m.groups()[:4]]
            [timetile, ztile, ytile, xtile, res] = [int(i.strip('/')) if i is not None else None for i in m.groups()[4:]]
        except Exception as e:
            logger.error("Incorrect arguments given for getTile {}. {}".format(webargs, e))
            raise NDWSError("Incorrect arguments given for getTile {}. {}".format(webargs, e))

        self.proj = NDProject.fromTokenName(self.token)
        with closing(SpatialDB(self.proj)) as self.db:
            # memcache key
            mckey = self.buildKey(res, slice_type, xtile, ytile, ztile, timetile, filterlist)
            # if tile is in memcache, return it
            tile = self.mc.get(mckey)
            if tile is None:
                if slice_type == 'xy':
                    img = self.cacheMissXY(res, xtile, ytile, ztile, timetile, filterlist)
                elif slice_type == 'xz':
                    img = self.cacheMissXZ(res, xtile, ytile, ztile, timetile, filterlist)
                elif slice_type == 'yz':
                    img = self.cacheMissYZ(res, xtile, ytile, ztile, timetile, filterlist)
                else:
                    logger.error("Requested illegal image plane {}. Should be xy, xz, yz.".format(slice_type))
                    raise NDWSError("Requested illegal image plane {}. Should be xy, xz, yz.".format(slice_type))
                fobj = cStringIO.StringIO()
                img.save(fobj, "PNG")
                self.mc.set(mckey, fobj.getvalue())
            else:
                fobj = cStringIO.StringIO(tile)
        fobj.seek(0)
        return fobj
if Dataset.objects.filter(dataset_name=ds.dataset_name).exists():
    stored_ds = NDDataset.fromName(ds.dataset_name)
    if compareModelObjects(stored_ds, ds):
        pass
        # pr.dataset_id = stored_ds.dataset_name
    else:
        logger.error("Dataset {} already exists and is different from the chosen dataset".format(ds.dataset_name))
        return HttpResponseBadRequest(json.dumps("Dataset {} already exists and is different from the chosen dataset. Please choose a different dataset name".format(ds.dataset_name)), content_type="application/json")
else:
    ds.create()
    DATASET_CREATED = True
    # pr.dataset_id = ds.dataset_name

# extracting project and token
pr, tk = extractProjectDict(project_dict)
pr = NDProject.fromJson(ds.dataset_name, pr)
pr.user_id = 1
pr.kvengine = REDIS
# Checking if the posted project already exists
# Setting the foreign key for project
if Project.objects.filter(project_name=pr.project_name).exists():
    stored_pr = NDProject.fromName(pr.project_name)
    tk = NDToken.fromJson(pr.project_name, tk)
    tk.user_id = 1
    # Checking if the existing project is the same as the posted one. We
    # compare their datasets because comparing nested sub-objects directly is
    # unreliable; not fool-proof, but a workable heuristic.
    if compareModelObjects(stored_pr.datasetcfg, pr.datasetcfg):
        if Token.objects.filter(token_name=tk.token_name).exists():
            stored_tk = NDToken.fromName(tk.token_name)
            # tk.project_id = stored_pr.project_name
            # Checking if the existing token is the same as the posted one
def createTestDB(project_name,
                 channel_list=['unit_anno'],
                 channel_type=ANNOTATION,
                 channel_datatype=UINT32,
                 public=PUBLIC_TRUE,
                 ximagesize=10000,
                 yimagesize=10000,
                 zimagesize=1000,
                 xvoxelres=4.0,
                 yvoxelres=4.0,
                 zvoxelres=3.0,
                 scalingoption=ZSLICES,
                 scalinglevels=5,
                 readonly=READONLY_FALSE,
                 propagate=NOT_PROPAGATED,
                 window=[0, 0],
                 time=[0, 15],
                 default=False,
                 nd_version=ND_VERSION,
                 token_name='unittest',
                 user='******',
                 dataset_name="unittest",
                 base_resolution=0):
    """Create a unit test database on the specified site and name"""

    # set s3backend to true if Redis and create the s3 bucket and dynamo table
    if KV_ENGINE == REDIS:
        s3backend = S3_TRUE
        CuboidIndexDB.createTable()
        CuboidBucket.createBucket()
    else:
        s3backend = S3_FALSE

    unituser = User.objects.get(username=user)

    ds = NDDataset(Dataset(dataset_name=dataset_name, user=unituser, ximagesize=ximagesize, yimagesize=yimagesize, zimagesize=zimagesize, xoffset=0, yoffset=0, zoffset=1, xvoxelres=xvoxelres, yvoxelres=yvoxelres, zvoxelres=zvoxelres, scalingoption=scalingoption, scalinglevels=scalinglevels, public=PUBLIC_TRUE, dataset_description="Unit test"))
    ds.create()

    # make the project entry
    pr = NDProject(Project(project_name=project_name, project_description='Unit test', user=unituser, dataset=ds._ds, nd_version=nd_version, host='localhost', kvengine=KV_ENGINE, kvserver=KV_SERVER, s3backend=s3backend))
    pr.create()

    # create a token
    tk = NDToken(Token(token_name=token_name, user=unituser, token_description='Unit test token', project_id=pr.project_name, public=public))
    tk.create()

    # get the correct object for the kvengine
    # pd = NDProjectsDB.getProjDB(pr)
    # create the database
    # pd.newNDProject()

    try:
        for channel_name in channel_list:
            ch = NDChannel(Channel(channel_name=channel_name, channel_type=channel_type, channel_datatype=channel_datatype, channel_description='Unit test channel', project_id=pr.project_name, readonly=readonly, propagate=propagate, resolution=base_resolution, exceptions=1, starttime=time[0], endtime=time[1], startwindow=window[0], endwindow=window[1], default=default))
            # create a channel
            ch.create()
            # create the channel table
            # pd.newNDChannel(ch.channel_name)
    except Exception as e:
        print(e)
        raise e
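# A typical call to the fixture builder above, sketched for an image-channel
# project. IMAGE and UINT8 are assumed to be importable from the same settings
# module that provides ANNOTATION and UINT32; adjust names to your setup.
def _example_create_image_fixture():
    """Illustrative only: build a two-channel, 8-bit image test project."""
    createTestDB('unittest_images',
                 channel_list=['chan1', 'chan2'],
                 channel_type=IMAGE,       # assumed constant
                 channel_datatype=UINT8,   # assumed constant
                 ximagesize=2048, yimagesize=2048, zimagesize=64,
                 token_name='unittest_images')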
def get(self, request):
    try:
        return HttpResponse(json.dumps(NDProject.public_list()), content_type='application/json')
    except Exception as e:
        return HttpResponseBadRequest()
def genGraphRAMON(token_name, channel, graphType="graphml", xmin=0, xmax=0, ymin=0, ymax=0, zmin=0, zmax=0):
    """Generate the graph based on different inputs"""

    # converting all parameters to integers
    [xmin, xmax, ymin, ymax, zmin, zmax] = [int(i) for i in [xmin, xmax, ymin, ymax, zmin, zmax]]

    proj = NDProject.fromTokenName(token_name)

    with closing(ramondb.RamonDB(proj)) as db:
        ch = proj.getChannelObj(channel)
        resolution = ch.resolution
        cubeRestrictions = xmin + xmax + ymin + ymax + zmin + zmax

        # assumption that the channel is a neuron channel
        if cubeRestrictions != 0:
            idslist = getAnnoIds(proj, ch, resolution, xmin, xmax, ymin, ymax, zmin, zmax)
        else:
            # entire cube
            [xmax, ymax, zmax] = proj.datasetcfg.get_imagesize(resolution)
            idslist = getAnnoIds(proj, ch, resolution, xmin, xmax, ymin, ymax, zmin, zmax)

        if idslist.size == 0:
            logger.error("Area specified x:{},{} y:{},{} z:{},{} is empty".format(xmin, xmax, ymin, ymax, zmin, zmax))
            raise NDWSError("Area specified x:{},{} y:{},{} z:{},{} is empty".format(xmin, xmax, ymin, ymax, zmin, zmax))

        annos = {}
        for i in idslist:
            tmp = db.getAnnotation(ch, i)
            if int(db.annodb.getAnnotationKV(ch, i)['ann_type']) == annotation.ANNO_SYNAPSE:
                annos[i] = [int(s) for s in tmp.getField('segments').split(',')]

        # create and export graph
        outputGraph = nx.Graph()
        for key in annos:
            outputGraph.add_edges_from([tuple(annos[key])])

        try:
            f = tempfile.NamedTemporaryFile()
            if graphType.upper() == "GRAPHML":
                nx.write_graphml(outputGraph, f)
            elif graphType.upper() == "ADJLIST":
                nx.write_adjlist(outputGraph, f)
            elif graphType.upper() == "EDGELIST":
                nx.write_edgelist(outputGraph, f)
            elif graphType.upper() == "GEXF":
                nx.write_gexf(outputGraph, f)
            elif graphType.upper() == "GML":
                nx.write_gml(outputGraph, f)
            elif graphType.upper() == "GPICKLE":
                nx.write_gpickle(outputGraph, f)
            elif graphType.upper() == "YAML":
                nx.write_yaml(outputGraph, f)
            elif graphType.upper() == "PAJEK":
                # networkx's Pajek writer is write_pajek (write_net is not a networkx function)
                nx.write_pajek(outputGraph, f)
            else:
                nx.write_graphml(outputGraph, f)
            f.flush()
            f.seek(0)
        except:
            logger.error("Internal file error in creating/editing a NamedTemporaryFile")
            f.close()
            raise NDWSError("Internal file error in creating/editing a NamedTemporaryFile")

    return (f, graphType.lower())
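# The dispatch above just routes to one of networkx's writers. A minimal
# self-contained sketch of building a synapse-style edge graph and serializing
# it, independent of the RAMON database:
def _example_graph_export():
    """Illustrative only: toy synapse graph written as GraphML, the default
    format in genGraphRAMON above."""
    import tempfile
    import networkx as nx

    # toy "synapses": each one links a pair of segment ids
    synapses = {10: (101, 102), 11: (102, 103), 12: (104, 101)}
    g = nx.Graph()
    for pair in synapses.values():
        g.add_edges_from([pair])

    f = tempfile.NamedTemporaryFile()
    nx.write_graphml(g, f)
    f.flush()
    f.seek(0)
    return f.read()  # the serialized GraphML document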
class MCFCCatmaid:
    """Prefetch CATMAID tiles into memcache"""

    def __init__(self):
        """Bind the memcache client"""
        self.proj = None
        self.db = None
        self.token = None
        self.tilesz = 512
        self.colors = ('C', 'M', 'Y', 'R', 'G', 'B')
        self.channel_list = None
        # make the memcache connection
        self.mc = pylibmc.Client(["127.0.0.1"], binary=True, behaviors={"tcp_nodelay": True, "ketama": True})

    def __del__(self):
        pass

    def buildKey(self, res, xtile, ytile, zslice):
        return 'mcfc/{}/{}/{}/{}/{}/{}/{}'.format(self.token, ','.join(self.channel_list), ','.join(self.colors), res, xtile, ytile, zslice)

    def cacheMissXY(self, res, xtile, ytile, zslice):
        """On a miss. Cutout, return the image and load the cache in a background thread"""
        # figure out the cutout (limit to max image size)
        xstart = xtile * self.tilesz
        ystart = ytile * self.tilesz
        xend = min((xtile + 1) * self.tilesz, self.proj.datasetcfg.get_imagesize(res)[0])
        yend = min((ytile + 1) * self.tilesz, self.proj.datasetcfg.get_imagesize(res)[1])

        # call the mcfc interface
        imageargs = '{}/{},{}/{},{}/{},{}/'.format(res, xstart, xend, ystart, yend, zslice, zslice + 1)

        tiledata = None
        for index, channel_name in enumerate(self.channel_list):
            ch = self.proj.getChannelObj(channel_name)
            cutout = ndwsrest.cutout(imageargs, ch, self.proj, self.db)
            # initialize the tiledata by type
            if tiledata is None:
                tiledata = np.zeros((len(self.channel_list), cutout.data.shape[0], self.tilesz, self.tilesz), dtype=cutout.data.dtype)
            tiledata[index, 0, 0:((yend - 1) % self.tilesz + 1), 0:((xend - 1) % self.tilesz + 1)] = cutout.data[0, :, :]
            tiledata[index, :] = ndwsrest.window(tiledata[index, :], ch)

        # we have a compound array, now false-color it
        return mcfcPNG(tiledata.reshape((tiledata.shape[0], tiledata.shape[2], tiledata.shape[3])), self.colors)

    def cacheMissXZ(self, res, xtile, yslice, ztile):
        """On a miss. Cutout, return the image and load the cache in a background thread"""
        # figure out the cutout (limit to max image size)
        xstart = xtile * self.tilesz
        xend = min((xtile + 1) * self.tilesz, self.proj.datasetcfg.get_imagesize(res)[0])

        # z cutouts need to get rescaled. we'll map to the closest pixel range
        # and tolerate one pixel error at the boundary
        scalefactor = self.proj.datasetcfg.get_scale(res)['xz']
        zoffset = self.proj.datasetcfg.get_offset(res)[2]
        ztilestart = int((ztile * self.tilesz) / scalefactor) + zoffset
        zstart = max(ztilestart, zoffset)
        ztileend = int(math.ceil(((ztile + 1) * self.tilesz) / scalefactor)) + zoffset
        zend = min(ztileend, self.proj.datasetcfg.get_imagesize(res)[2])

        # call the mcfc interface
        imageargs = '{}/{},{}/{},{}/{},{}/'.format(res, xstart, xend, yslice, yslice + 1, zstart, zend)

        tiledata = None
        for index, channel_name in enumerate(self.channel_list):
            ch = self.proj.getChannelObj(channel_name)
            cutout = ndwsrest.cutout(imageargs, ch, self.proj, self.db)
            # initialize the tiledata by type
            if tiledata is None:
                tiledata = np.zeros((len(self.channel_list), zend - zstart, cutout.data.shape[1], self.tilesz), dtype=cutout.data.dtype)
            tiledata[index, 0:zend - zstart, 0, 0:((xend - 1) % self.tilesz + 1)] = cutout.data[:, 0, :]
        tiledata = ndwsrest.window(tiledata, ch)

        # we have a compound array, now false-color it
        img = mcfc.mcfcPNG(tiledata.reshape((tiledata.shape[0], tiledata.shape[1], tiledata.shape[3])), self.colors)
        return img.resize((self.tilesz, self.tilesz))

    def cacheMissYZ(self, res, xtile, ytile, ztile):
        """On a miss. Cutout, return the image and load the cache in a background thread"""
        # figure out the cutout (limit to max image size)
        ystart = ytile * self.tilesz
        yend = min((ytile + 1) * self.tilesz, self.proj.datasetcfg.get_imagesize(res)[1])

        # z cutouts need to get rescaled. we'll map to the closest pixel range
        # and tolerate one pixel error at the boundary
        scalefactor = self.proj.datasetcfg.get_scale(res)['yz']
        zoffset = self.proj.datasetcfg.get_offset(res)[2]
        ztilestart = int((ztile * self.tilesz) / scalefactor) + zoffset
        zstart = max(ztilestart, zoffset)
        ztileend = int(math.ceil(((ztile + 1) * self.tilesz) / scalefactor)) + zoffset
        zend = min(ztileend, self.proj.datasetcfg.get_imagesize(res)[2])

        # call the mcfc interface
        imageargs = '{}/{},{}/{},{}/{},{}/'.format(res, xtile, xtile + 1, ystart, yend, zstart, zend)

        tiledata = None
        for index, channel_name in enumerate(self.channel_list):
            ch = self.proj.getChannelObj(channel_name)
            cutout = ndwsrest.cutout(imageargs, ch, self.proj, self.db)
            # initialize the tiledata by type
            if tiledata is None:
                tiledata = np.zeros((len(self.channel_list), ztileend - ztilestart, self.tilesz, cutout.data.shape[2]), dtype=cutout.data.dtype)
            tiledata[index, 0:zend - zstart, 0:((yend - 1) % self.tilesz + 1), 0] = cutout.data[:, :, 0]
        tiledata = ndwsrest.window(tiledata, ch)

        # we have a compound array, now false-color it
        img = mcfc.mcfcPNG(tiledata.reshape((tiledata.shape[0], tiledata.shape[1], tiledata.shape[2])), self.colors)
        return img.resize((self.tilesz, self.tilesz))

    def getTile(self, webargs):
        """Either fetch the file from memcache or get a mcfc image"""
        try:
            # arguments of format /token/channel/slice_type/z/x_y_res.png
            m = re.match(r"(\w+)/([\w+,[:\w]*]*)/(xy|yz|xz)/(\d+)/(\d+)_(\d+)_(\d+).png", webargs)
            [self.token, channels, slice_type] = [i for i in m.groups()[:3]]
            [ztile, ytile, xtile, res] = [int(i) for i in m.groups()[3:]]
            # check for channel_name:color pairs and put them in the designated lists
            try:
                self.channel_list, colors = zip(*re.findall(r"(\w+)[:]?(\w)?", channels))
                # if any explicit colors were given, merge them over the default scheme
                if any(colors):
                    self.colors = [b if a == '' else a for a, b in zip(colors, self.colors)]
            except Exception as e:
                logger.error("Incorrect channel format for getTile {}. {}".format(channels, e))
                raise NDWSError("Incorrect channel format for getTile {}. {}".format(channels, e))
        except Exception as e:
            logger.error("Incorrect arguments for getTile {}. {}".format(webargs, e))
            raise NDWSError("Incorrect arguments for getTile {}. {}".format(webargs, e))

        self.proj = NDProject.fromTokenName(self.token)
        with closing(SpatialDB(self.proj)) as self.db:
            # memcache key
            mckey = self.buildKey(res, xtile, ytile, ztile)
            # if tile is in memcache, return it
            tile = self.mc.get(mckey)
            if tile is None:
                if slice_type == 'xy':
                    img = self.cacheMissXY(res, xtile, ytile, ztile)
                elif slice_type == 'xz':
                    img = self.cacheMissXZ(res, xtile, ytile, ztile)
                elif slice_type == 'yz':
                    img = self.cacheMissYZ(res, xtile, ytile, ztile)
                else:
                    logger.error("Requested illegal image plane {}. Should be xy, xz, yz.".format(slice_type))
                    raise NDWSError("Requested illegal image plane {}. Should be xy, xz, yz.".format(slice_type))
                fobj = cStringIO.StringIO()
                img.save(fobj, "PNG")
                self.mc.set(mckey, fobj.getvalue())
            else:
                fobj = cStringIO.StringIO(tile)
        fobj.seek(0)
        return fobj
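# The channel string in the MCFC URL may carry an optional per-channel color,
# e.g. "synapse:R,vesicle,membrane:G". A standalone look at how the findall in
# getTile splits it and how missing colors fall back to the default scheme:
def _example_channel_color_parse():
    """Illustrative only: parse channel:color pairs and merge defaults."""
    import re

    default_colors = ('C', 'M', 'Y', 'R', 'G', 'B')
    channels = 'synapse:R,vesicle,membrane:G'

    channel_list, colors = zip(*re.findall(r"(\w+)[:]?(\w)?", channels))
    # channel_list == ('synapse', 'vesicle', 'membrane'); colors == ('R', '', 'G')

    # explicit colors win; empty slots fall back to the default scheme
    merged = [b if a == '' else a for a, b in zip(colors, default_colors)]
    return channel_list, merged   # ['R', 'M', 'G']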