def setup_class(self): """Setup class parameters""" # create the tile index table. skip if it exists try: TileIndexDB.createTable(endpoint_url=settings.DYNAMO_ENDPOINT) CuboidIndexDB.createTable(endpoint_url=settings.DYNAMO_ENDPOINT) except Exception as e: pass self.tileindex_db = TileIndexDB(nd_proj.project_name, endpoint_url=settings.DYNAMO_ENDPOINT) self.tile_bucket = TileBucket(nd_proj.project_name, endpoint_url=settings.S3_ENDPOINT) [self.x_tile, self.y_tile, self.z_tile] = [0, 0, 0] supercuboid_key = 'testing' message_id = '123456' receipt_handle = 'testing123456' message = serializer.encodeDeleteMessage(supercuboid_key, message_id, receipt_handle) # insert message in the upload queue CleanupQueue.createQueue(nd_proj, endpoint_url=settings.SQS_ENDPOINT) self.cleanup_queue = CleanupQueue(nd_proj, endpoint_url=settings.SQS_ENDPOINT) self.cleanup_queue.sendMessage(message) # receive message and upload object for z_index in range(self.z_tile, settings.SUPER_CUBOID_SIZE[2], 1): tile_handle = cStringIO.StringIO() self.tile_bucket.putObject(tile_handle, nd_proj.channel_name, nd_proj.resolution, self.x_tile, self.y_tile, z_index, message_id, receipt_handle)
def setup_class(self): """Setup parameters""" try: CuboidIndexDB.createTable(endpoint_url=settings.DYNAMO_ENDPOINT) except Exception as e: pass self.cuboid_index = CuboidIndexDB( nd_proj.project_name, endpoint_url=settings.DYNAMO_ENDPOINT)
def setup_class(self): """Setup class parameters""" # create the tile index table. skip if it exists try: TileIndexDB.createTable(endpoint_url="http://localhost:8000") CuboidIndexDB.createTable(endpoint_url="http://localhost:8000") except Exception as e: pass self.tileindex_db = TileIndexDB(nd_proj.project_name, endpoint_url="http://localhost:8000") # create the tile bucket TileBucket.createBucket(endpoint_url="http://localhost:4567") self.tile_bucket = TileBucket(nd_proj.project_name, endpoint_url="http://localhost:4567") self.tiles = [self.x_tile, self.y_tile, self.z_tile] = [0, 0, 0] message_id = "testing" receipt_handle = "123456" # insert SUPER_CUBOID_SIZE tiles in the bucket for z_index in (self.z_tile, settings.SUPER_CUBOID_SIZE[2], 1): tile_handle = cStringIO.StringIO() self.tile_bucket.putObject( tile_handle, nd_proj.channel_name, nd_proj.resolution, self.x_tile, self.y_tile, z_index, message_id, receipt_handle, ) # creating the cuboid bucket CuboidBucket.createBucket(endpoint_url="http://localhost:4567") self.cuboid_bucket = CuboidBucket(nd_proj.project_name, endpoint_url="http://localhost:4567") # create the ingest queue IngestQueue.createQueue(nd_proj, endpoint_url="http://localhost:4568") self.ingest_queue = IngestQueue(nd_proj, endpoint_url="http://localhost:4568") # send message to the ingest queue morton_index = XYZMorton(self.tiles) supercuboid_key = self.cuboid_bucket.generateSupercuboidKey( nd_proj.channel_name, nd_proj.resolution, morton_index) response = self.ingest_queue.sendMessage(supercuboid_key) # create the cleanup queue CleanupQueue.createQueue(nd_proj, endpoint_url="http://localhost:4568")
def createTestDB(project_name, channel_list=['unit_anno'], channel_type=ANNOTATION,
                 channel_datatype=UINT32, public=PUBLIC_TRUE, ximagesize=10000,
                 yimagesize=10000, zimagesize=1000, xvoxelres=4.0, yvoxelres=4.0,
                 zvoxelres=3.0, scalingoption=ZSLICES, scalinglevels=5,
                 readonly=READONLY_FALSE, propagate=NOT_PROPAGATED, window=[0, 0],
                 time=[0, 15], default=False, nd_version=ND_VERSION,
                 token_name='unittest', user='******', dataset_name="unittest",
                 base_resolution=0):
    """Create a unit test database on the specified site and name"""

    # set s3backend to true if the KV engine is Redis and create the S3 bucket and Dynamo table
    if KV_ENGINE == REDIS:
        s3backend = S3_TRUE
        CuboidIndexDB.createTable()
        CuboidBucket.createBucket()
    else:
        s3backend = S3_FALSE

    unituser = User.objects.get(username=user)

    # make the dataset entry
    ds = NDDataset(Dataset(dataset_name=dataset_name, user=unituser, ximagesize=ximagesize,
                           yimagesize=yimagesize, zimagesize=zimagesize, xoffset=0, yoffset=0,
                           zoffset=1, xvoxelres=xvoxelres, yvoxelres=yvoxelres, zvoxelres=zvoxelres,
                           scalingoption=scalingoption, scalinglevels=scalinglevels,
                           public=PUBLIC_TRUE, dataset_description="Unit test"))
    ds.create()

    # make the project entry
    pr = NDProject(Project(project_name=project_name, project_description='Unit test',
                           user=unituser, dataset=ds._ds, nd_version=nd_version, host='localhost',
                           kvengine=KV_ENGINE, kvserver=KV_SERVER, s3backend=s3backend))
    pr.create()

    # create a token
    tk = NDToken(Token(token_name=token_name, user=unituser, token_description='Unit test token',
                       project_id=pr.project_name, public=public))
    tk.create()

    # get the correct object for the kvengine
    # pd = NDProjectsDB.getProjDB(NDProjectpr)
    # create the database
    # pd.newNDProject()

    try:
        for channel_name in channel_list:
            ch = NDChannel(Channel(channel_name=channel_name, channel_type=channel_type,
                                   channel_datatype=channel_datatype,
                                   channel_description='Unit test channel',
                                   project_id=pr.project_name, readonly=readonly,
                                   propagate=propagate, resolution=base_resolution, exceptions=1,
                                   starttime=time[0], endtime=time[1], startwindow=window[0],
                                   endwindow=window[1], default=default))
            # create a channel
            ch.create()
            # create the channel table
            # pd.newNDChannel(ch.channel_name)
    except Exception as e:
        print(e)
        raise e
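# A minimal usage sketch (not part of the original module): createTestDB is meant to be
# called from a test's setUp/setup_class. The project and token names below are
# hypothetical, and the user argument must name an existing Django user (the default
# above is redacted).
createTestDB('unittest_project', token_name='unittest_token', user='test_user')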
# Copyright 2014 NeuroData (http://neurodata.io)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import sys
sys.path += [os.path.abspath('../django')]
import ND.settings
os.environ['DJANGO_SETTINGS_MODULE'] = 'ND.settings'

from ndingest.nddynamo.cuboidindexdb import CuboidIndexDB
from ndingest.ndbucket.cuboidbucket import CuboidBucket

# set up the cuboid index table and bucket locally
CuboidIndexDB.createTable()
CuboidBucket.createBucket()