Example #1
    def teardown_class(self):
        """Teardown class parameters"""

        # cleanup tilebucket
        for z_index in range(self.z_tile, settings.SUPER_CUBOID_SIZE[2], 1):
            tile_key = self.tile_bucket.encodeObjectKey(
                nd_proj.channel_name,
                nd_proj.resolution,
                self.x_tile,
                self.y_tile,
                z_index,
            )
            self.tile_bucket.deleteObject(tile_key)

        morton_index = XYZMorton(self.tiles)
        supercuboid_key = self.cuboid_bucket.generateSupercuboidKey(
            nd_proj.channel_name, nd_proj.resolution, morton_index)
        self.cuboid_bucket.deleteObject(supercuboid_key)
        # delete created entities
        TileIndexDB.deleteTable(endpoint_url="http://localhost:8000")
        CuboidIndexDB.deleteTable(endpoint_url="http://localhost:8000")
        IngestQueue.deleteQueue(nd_proj, endpoint_url="http://localhost:4568")
        CleanupQueue.deleteQueue(nd_proj, endpoint_url="http://localhost:4568")
        TileBucket.deleteBucket(endpoint_url="http://localhost:4567")
        try:
            CuboidBucket.deleteBucket(endpoint_url="http://localhost:4567")
        except Exception:
            # ignore if the bucket is already gone
            pass
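A note on the key scheme used above: the supercuboid key is derived from a Morton (Z-order) index that interleaves the x, y, z cuboid coordinates. A minimal round-trip sketch, assuming XYZMorton and MortonXYZ from ndlib (the exact import path is an assumption):

    from ndlib.ndctypelib import XYZMorton, MortonXYZ  # import path assumed

    morton_index = XYZMorton([0, 0, 0])          # interleave x, y, z into one index
    assert MortonXYZ(morton_index) == [0, 0, 0]  # decode back to coordinates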
Example #2
    def teardown_class(self):
        """Teardown Parameters"""

        # Ensure bucket empty before deleting.
        for objs in self.cuboid_bucket.getAllObjects():
            self.cuboid_bucket.deleteObject(objs.key)

        CuboidBucket.deleteBucket(endpoint_url=self.endpoint_url)
Example #3
 def __init__(self, pr):
     """Create the database connection"""
     self.pr = pr
     # create connections for cuboid bucket and cuboid dynamo table
     self.cuboid_bucket = CuboidBucket(
         self.pr.project_name, endpoint_url=ndingest_settings.S3_ENDPOINT)
     self.cuboidindex_db = CuboidIndexDB(
         self.pr.project_name,
         endpoint_url=ndingest_settings.DYNAMO_ENDPOINT)
Example #4
 def setup_class(self):
     """Setup Parameters"""
     if 'S3_ENDPOINT' in dir(settings):
         self.endpoint_url = settings.S3_ENDPOINT
     else:
         self.endpoint_url = None
     CuboidBucket.createBucket(endpoint_url=self.endpoint_url)
     self.cuboid_bucket = CuboidBucket(nd_proj.project_name,
                                       endpoint_url=self.endpoint_url)
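The membership test on dir(settings) above is equivalent to a getattr with a default, which reads a little tighter; a sketch of the same endpoint lookup:

    # Fall back to None (real AWS endpoints) when the settings module
    # defines no local S3 emulator endpoint.
    endpoint_url = getattr(settings, 'S3_ENDPOINT', None)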
Example #5
  def __init__(self, token_name, host_name=HOST_NAME):
    # configuring the logger based on the dataset we are uploading
    self.logger = logging.getLogger(token_name)
    self.logger.setLevel(logging.INFO)
    fh = logging.FileHandler('{}_upload.log'.format(token_name))
    self.logger.addHandler(fh)

    self.info_interface = InfoInterface(host_name, token_name)
    self.project_name = self.info_interface.project_name
    self.cuboidindex_db = CuboidIndexDB(self.project_name)
    self.cuboid_bucket = CuboidBucket(self.project_name)
Example #6
    def setup_class(self):
        """Setup class parameters"""

        # create the tile index table. skip if it exists
        try:
            TileIndexDB.createTable(endpoint_url="http://localhost:8000")
            CuboidIndexDB.createTable(endpoint_url="http://localhost:8000")
        except Exception as e:
            pass
        self.tileindex_db = TileIndexDB(nd_proj.project_name,
                                        endpoint_url="http://localhost:8000")

        # create the tile bucket
        TileBucket.createBucket(endpoint_url="http://localhost:4567")
        self.tile_bucket = TileBucket(nd_proj.project_name,
                                      endpoint_url="http://localhost:4567")
        self.tiles = [self.x_tile, self.y_tile, self.z_tile] = [0, 0, 0]

        message_id = "testing"
        receipt_handle = "123456"
        # insert SUPER_CUBOID_SIZE tiles in the bucket
        for z_index in range(self.z_tile, settings.SUPER_CUBOID_SIZE[2], 1):
            tile_handle = cStringIO.StringIO()
            self.tile_bucket.putObject(
                tile_handle,
                nd_proj.channel_name,
                nd_proj.resolution,
                self.x_tile,
                self.y_tile,
                z_index,
                message_id,
                receipt_handle,
            )

        # creating the cuboid bucket
        CuboidBucket.createBucket(endpoint_url="http://localhost:4567")
        self.cuboid_bucket = CuboidBucket(nd_proj.project_name,
                                          endpoint_url="http://localhost:4567")

        # create the ingest queue
        IngestQueue.createQueue(nd_proj, endpoint_url="http://localhost:4568")
        self.ingest_queue = IngestQueue(nd_proj,
                                        endpoint_url="http://localhost:4568")

        # send message to the ingest queue
        morton_index = XYZMorton(self.tiles)
        supercuboid_key = self.cuboid_bucket.generateSupercuboidKey(
            nd_proj.channel_name, nd_proj.resolution, morton_index)
        response = self.ingest_queue.sendMessage(supercuboid_key)

        # create the cleanup queue
        CleanupQueue.createQueue(nd_proj, endpoint_url="http://localhost:4568")
Example #7
class Interface(object):

  def __init__(self, dataset_name, project_name, host_name=HOST_NAME):
    # self.resource_interface = ResourceInterface(dataset_name, project_name, host_name)
    self.cuboid_bucket = CuboidBucket(project_name)
    self.cuboidindex_db = CuboidIndexDB(project_name)

  # def deleteToken(self):
    # """Delete the Token"""
    # self.resource_interface.deleteToken()
    # print 'Delete successful for token {}'.format(self.token)

  def deleteProject(self):
    """Delete the project"""
    return NotImplemented 
    # delete the project from s3 and dynamo
    # self.s3_projdb.deleteNDProject()
    # deleting the meta-data via resource interface
    # self.resource_interface.deleteToken()
    # self.resource_interface.deleteProject()
    # print 'Delete successful for project {}'.format(self.project_name)
  
  
  def deleteChannel(self, channel_name):
    """Delete the channel"""
    
    try:
      for item in self.cuboidindex_db.queryChannelItems(channel_name):
        self.cuboid_bucket.deleteObject(item['supercuboid_key'])
        self.cuboidindex_db.deleteItem(item['supercuboid_key'])
    except Exception as e:
      print(e)
      
    # delete the channel from s3 and dynamo
    # self.s3_projdb.deleteNDChannel(channel_name)
    # deleting the meta-data via resource interface
    # self.resource_interface.deleteChannel(channel_name)
    # print 'Delete successful for channel {}'.format(channel_name)


  def deleteResolution(self, channel_name, resolution):
    """Delete an existing resolution"""
    
    try:
      for item in self.cuboidindex_db.queryResolutionItems(channel_name, resolution):
        print(item['supercuboid_key'])
        self.cuboid_bucket.deleteObject(item['supercuboid_key'])
        self.cuboidindex_db.deleteItem(item['supercuboid_key'])
    except Exception as e:
      print(e)
Example #8
    def __init__(self, token_name, host_name=HOST_NAME):
        """Create the bucket and intialize values"""

        # configuring the logger based on the dataset we are uploading
        self.logger = logging.getLogger(token_name)
        self.logger.setLevel(logging.INFO)
        fh = logging.FileHandler('{}.log'.format(token_name))
        self.logger.addHandler(fh)
        # setting up the project metadata
        self.info_interface = InfoInterface(host_name, token_name)
        # creating the resource interface to the remote server
        # self.resource_interface = ResourceInterface(self.info_interface.dataset_name, self.info_interface.project_name, host_name, logger=self.logger)
        # self.proj = self.resource_interface.getProject()
        # create the s3 I/O and index objects
        self.cuboidindex_db = CuboidIndexDB(self.info_interface.project_name)
        self.cuboid_bucket = CuboidBucket(self.info_interface.project_name)
Example #9
    def __init__(self, db):
        """Connect to the S3 backend"""

        try:
            self.db = db
            self.project_name = self.db.proj.project_name
            self.cuboidindex_db = CuboidIndexDB(self.project_name)
            self.cuboid_bucket = CuboidBucket(self.project_name)
        except Exception as e:
            logger.error("Cannot connect to S3 backend: {}".format(e))
            raise SpatialDBError("Cannot connect to S3 backend")
Example #10
def createTestDB(project_name, channel_list=['unit_anno'], channel_type=ANNOTATION,
                 channel_datatype=UINT32, public=PUBLIC_TRUE, ximagesize=10000,
                 yimagesize=10000, zimagesize=1000, xvoxelres=4.0, yvoxelres=4.0,
                 zvoxelres=3.0, scalingoption=ZSLICES, scalinglevels=5,
                 readonly=READONLY_FALSE, propagate=NOT_PROPAGATED, window=[0,0],
                 time=[0,15], default=False, nd_version=ND_VERSION,
                 token_name='unittest', user='******', dataset_name="unittest",
                 base_resolution=0):
  """Create a unit test database on the specified site and name"""
  
  # setting s3backend to true if Redis and creating s3 bucket and dynamo table
  if KV_ENGINE == REDIS:
    s3backend = S3_TRUE
    CuboidIndexDB.createTable()
    CuboidBucket.createBucket()    
  else:
    s3backend = S3_FALSE

  unituser = User.objects.get(username=user)

  ds = NDDataset(Dataset(
      dataset_name=dataset_name, user=unituser, ximagesize=ximagesize,
      yimagesize=yimagesize, zimagesize=zimagesize, xoffset=0, yoffset=0,
      zoffset=1, xvoxelres=xvoxelres, yvoxelres=yvoxelres, zvoxelres=zvoxelres,
      scalingoption=scalingoption, scalinglevels=scalinglevels,
      public=PUBLIC_TRUE, dataset_description="Unit test"))
  ds.create()

  # make the project entry
  pr = NDProject(Project(
      project_name=project_name, project_description='Unit test',
      user=unituser, dataset=ds._ds, nd_version=nd_version, host='localhost',
      kvengine=KV_ENGINE, kvserver=KV_SERVER, s3backend=s3backend))
  pr.create()

  # create a token
  tk = NDToken(Token(
      token_name=token_name, user=unituser,
      token_description='Unit test token', project_id=pr.project_name,
      public=public))
  tk.create()
  
  # get the correct object for the kvengine
  # pd = NDProjectsDB.getProjDB(NDProjectpr)
  # create the database
  # pd.newNDProject()

  try:
    for channel_name in channel_list:
      ch = NDChannel(Channel(
          channel_name=channel_name, channel_type=channel_type,
          channel_datatype=channel_datatype,
          channel_description='Unit test channel', project_id=pr.project_name,
          readonly=readonly, propagate=propagate, resolution=base_resolution,
          exceptions=1, starttime=time[0], endtime=time[1],
          startwindow=window[0], endwindow=window[1], default=default))
      # create a channel
      ch.create()
      # create the channel table
      # pd.newNDChannel(ch.channel_name)
  except Exception as e:
    print(e)
    raise e
Example #11
def deleteTestDB ( project_name, token_name='unittest' ):
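  """Delete the test token, project, dataset and channels, plus any S3/Dynamo state"""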
  

  try:
    # get the objects
    tk = NDToken.fromName(token_name)
    tk.delete()
    pr = NDProject.fromName(project_name)
    ds = pr.datasetcfg
    # tk = Token.objects.get(token_name=token_name)
    # pr = Project.objects.get(project_name=project_name)
    # ds = Dataset.objects.get(dataset_name=pr.dataset_id)
    
    # get the channel list
    # channel_list = Channel.objects.filter(project_id=pr)
    
    # get the correct object for the kvengine
    # pd = NDProjectsDB.getProjDB(pr)
    
    for ch in pr.projectChannels():
      ch.delete()
      # delete the channel table
      # pd.deleteNDChannel(ch.channel_name)
      # delete the channel
      # ch.delete()
    # delete the project database
    # pd.deleteNDProject()
    # delete the objects
    pr.delete()
    ds.delete()
    
    # delete s3 bucket and dynamo table
    if KV_ENGINE == REDIS:
      CuboidIndexDB.deleteTable()
      CuboidBucket.deleteBucket()
  except Exception as e:
    print(e)
    raise e
Example #12
class S3Cuboid(object):

  def __init__(self, token_name, host_name=HOST_NAME):
    # configuring the logger based on the dataset we are uploading
    self.logger = logging.getLogger(token_name)
    self.logger.setLevel(logging.INFO)
    fh = logging.FileHandler('{}_upload.log'.format(token_name))
    self.logger.addHandler(fh)

    self.info_interface = InfoInterface(host_name, token_name)
    self.project_name = self.info_interface.project_name
    self.cuboidindex_db = CuboidIndexDB(self.project_name)
    self.cuboid_bucket = CuboidBucket(self.project_name)


  def upload(self, file_name, channel_name, resolution, x_index, y_index,
             z_index, dimensions=[1, 64, 512, 512], time_index=0, neariso=False):
    """Upload a 4D supercuboid directly to dynamo and s3"""
    cuboid_data = np.fromfile(file_name, dtype=self.info_interface.get_channel_datatype(channel_name))
    cuboid_data = cuboid_data.reshape(dimensions)
    super_zidx = XYZMorton([x_index, y_index, z_index])
    self.logger.info("Inserting cube {},{},{}".format(x_index, y_index, z_index))
    self.cuboidindex_db.putItem(channel_name, resolution, x_index, y_index, z_index, time_index, neariso=neariso)
    self.cuboid_bucket.putObject(channel_name, resolution, super_zidx, time_index, blosc.pack_array(cuboid_data), neariso=neariso)
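A hypothetical call sequence for S3Cuboid.upload, assuming a raw binary volume on disk whose dtype matches the channel (the token, channel, and file name are placeholders):

    s3_cuboid = S3Cuboid('example_token')
    # dimensions are [t, z, y, x]; the file must hold exactly that many voxels
    s3_cuboid.upload('supercuboid.bin', 'example_channel', 0, 0, 0, 0,
                     dimensions=[1, 64, 512, 512])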
Example #13
def deleteTestDB(project_name, token_name='unittest'):
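    """Delete the test token, project, dataset and channels, plus any S3/Dynamo state"""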

    try:
        # get the objects
        tk = NDToken.fromName(token_name)
        tk.delete()
        pr = NDProject.fromName(project_name)
        ds = pr.datasetcfg
        # tk = Token.objects.get(token_name=token_name)
        # pr = Project.objects.get(project_name=project_name)
        # ds = Dataset.objects.get(dataset_name=pr.dataset_id)

        # get the channel list
        # channel_list = Channel.objects.filter(project_id=pr)

        # get the correct object for the kvengine
        # pd = NDProjectsDB.getProjDB(pr)

        for ch in pr.projectChannels():
            ch.delete()
            # delete the channel table
            # pd.deleteNDChannel(ch.channel_name)
            # delete the channel
            # ch.delete()
        # delete the project database
        # pd.deleteNDProject()
        # delete the objects
        pr.delete()
        ds.delete()

        # delete s3 bucket and dynamo table
        if KV_ENGINE == REDIS:
            CuboidIndexDB.deleteTable()
            CuboidBucket.deleteBucket()
    except Exception as e:
        print(e)
        raise e
Example #14
 def __init__(self, token_name, host_name=HOST_NAME):
   """Create the bucket and intialize values"""
 
   # configuring the logger based on the dataset we are uploading
   self.logger = logging.getLogger(token_name)
   self.logger.setLevel(logging.INFO)
   fh = logging.FileHandler('{}.log'.format(token_name))
   self.logger.addHandler(fh)
   # setting up the project metadata
   self.info_interface = InfoInterface(host_name, token_name)
   # creating the resource interface to the remote server
   # self.resource_interface = ResourceInterface(self.info_interface.dataset_name, self.info_interface.project_name, host_name, logger=self.logger)
   # self.proj = self.resource_interface.getProject()
   # create the s3 I/O and index objects
   self.cuboidindex_db = CuboidIndexDB(self.info_interface.project_name)
   self.cuboid_bucket = CuboidBucket(self.info_interface.project_name)
Example #15
class S3ProjectDB:
  """Database for the projects"""

  def __init__(self, pr):
    """Create the database connection"""
    self.pr = pr
    # create connections for cuboid bucket and cuboid dynamo table
    self.cuboid_bucket = CuboidBucket(self.pr.project_name, endpoint_url=ndingest_settings.S3_ENDPOINT)
    self.cuboidindex_db = CuboidIndexDB(self.pr.project_name, endpoint_url=ndingest_settings.DYNAMO_ENDPOINT)

  def __del__(self):
    """Close the database connection"""
    self.close()

  def close (self):
    """Close the database connection"""
    pass


  def newNDProject(self):
    """Create the database for a project."""
    pass
  

  def newNDChannel(self, channel_name):
    """Create the tables for a channel."""
    pass


  def deleteNDProject(self):
    """Delete a project in s3 and dyanmo"""
    
    try:
      for item in self.cuboidindex_db.queryProjectItems():
        self.cuboid_bucket.deleteObject(item['supercuboid_key'])
        self.cuboidindex_db.deleteItem(item['supercuboid_key'])
    except botocore.exceptions.ClientError as e:
      if e.response['Error']['Code'] == 'ResourceNotFoundException':
        logger.warning("Resource was not accessible {}".format(e))
        pass
      else:
        raise e
    except Exception as e:
      logger.error("Error in deleting S3 project {}. {}".format(self.pr.project_name, e))
      raise NDWSError("Error in deleting S3 project {}. {}".format(self.pr.project_name, e))


  def deleteNDChannel(self, channel_name):
    """Delete a channel in s3 and dynamo"""
    
    try:
      for item in self.cuboidindex_db.queryChannelItems(channel_name):
        self.cuboid_bucket.deleteObject(item['supercuboid_key'])
        self.cuboidindex_db.deleteItem(item['supercuboid_key'])
    except botocore.exceptions.ClientError as e:
      if e.response['Error']['Code'] == 'ResourceNotFoundException':
        logger.warning("Resource was not accessible {}".format(e))
        pass
      else:
        raise e
    except Exception as e:
      logger.error("Error in deleting S3 channel {}. {}".format(channel_name, e))
      raise NDWSError("Error in deleting S3 channel {}. {}".format(channel_name, e))

  def deleteNDResolution(self, channel_name, resolution):
    """Delete the resolution in s3 and dynamo"""

    try:
      for item in self.cuboidindex_db.queryResolutionItems(channel_name, resolution):
        self.cuboid_bucket.deleteObject(item['supercuboid_key'])
        self.cuboidindex_db.deleteItem(item['supercuboid_key'])
    except botocore.exceptions.ClientError as e:
      if e.response['Error']['Code'] == 'ResourceNotFoundException':
        logger.warning("Resource was not accessible {}".format(e))
        pass
      else:
        raise e
    except Exception as e:
      logger.error("Error in deleting S3 channel resolution {},{}. {}".format(channel_name, resolution, e))
      raise NDWSError("Error in deleting S3 channel resolution {},{}. {}".format(channel_name, resolution, e))
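Hypothetical usage, assuming pr is a project object exposing project_name as the constructor above expects (channel name is a placeholder):

    proj_db = S3ProjectDB(pr)
    proj_db.deleteNDChannel('example_channel')        # drop channel cuboids from S3 and Dynamo
    proj_db.deleteNDResolution('example_channel', 0)  # drop a single resolution level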
Example #16
class AwsInterface:
    def __init__(self, token_name, host_name=HOST_NAME):
        """Create the bucket and intialize values"""

        # configuring the logger based on the dataset we are uploading
        self.logger = logging.getLogger(token_name)
        self.logger.setLevel(logging.INFO)
        fh = logging.FileHandler('{}.log'.format(token_name))
        self.logger.addHandler(fh)
        # setting up the project metadata
        self.info_interface = InfoInterface(host_name, token_name)
        # creating the resource interface to the remote server
        # self.resource_interface = ResourceInterface(self.info_interface.dataset_name, self.info_interface.project_name, host_name, logger=self.logger)
        # self.proj = self.resource_interface.getProject()
        # create the s3 I/O and index objects
        self.cuboidindex_db = CuboidIndexDB(self.info_interface.project_name)
        self.cuboid_bucket = CuboidBucket(self.info_interface.project_name)

    # def setupNewProject(self):
    # """Setup a new project if it does not exist"""

    # self.resource_interface.createDataset()
    # self.resource_interface.createProject()
    # self.resource_interface.createToken()

    def uploadExistingProject(self,
                              channel_name,
                              resolution,
                              start_values,
                              neariso=False):
        """Upload an existing project to S3"""

        self.setupNewProject()
        db = SpatialDB(self.proj)
        # checking for channels
        if channel_name is None:
            channel_list = None
        else:
            channel_list = [channel_name]

        # iterating over channels in a project
        for ch in self.proj.projectChannels(channel_list):

            # creating the channel resource
            self.resource_interface.createChannel(ch.channel_name)
            # ingest 1 or more resolutions based on user input
            if resolution is None:
                start_res = self.proj.datasetcfg.scalinglevels
                stop_res = ch.resolution - 1
            else:
                start_res = resolution
                stop_res = resolution - 1

            # iterating over resolution
            for cur_res in range(start_res, stop_res, -1):

                # get the source database sizes
                [image_size,
                 time_range] = self.proj.datasetcfg.dataset_dim(cur_res)
                [xcubedim, ycubedim, zcubedim
                 ] = cubedim = self.proj.datasetcfg.get_cubedim(cur_res)
                offset = self.proj.datasetcfg.get_offset(cur_res)
                [xsupercubedim, ysupercubedim, zsupercubedim
                 ] = supercubedim = self.proj.datasetcfg.get_supercubedim(
                     cur_res)
                # set the limits for iteration on the number of cubes in each dimension
                xlimit = (image_size[0] - 1) / (xsupercubedim) + 1
                ylimit = (image_size[1] - 1) / (ysupercubedim) + 1
                zlimit = (image_size[2] - 1) / (zsupercubedim) + 1
                # [xlimit, ylimit, zlimit] = limit = self.proj.datasetcfg.get_supercube_limit(cur_res)
                [x_start, y_start, z_start] = map(div, start_values,
                                                  supercubedim)
                for z in range(z_start, zlimit, 1):
                    for y in range(y_start, ylimit, 1):
                        for x in range(x_start, xlimit, 1):

                            try:
                                # cutout the data at the current resolution
                                data = db.cutout(ch, [
                                    x * xsupercubedim, y * ysupercubedim,
                                    z * zsupercubedim
                                ], [
                                    xsupercubedim, ysupercubedim, zsupercubedim
                                ], cur_res).data
                                # generate the morton index
                                morton_index = XYZMorton([x, y, z])

                                self.logger.info("[{},{},{}] at res {}".format(
                                    x * xsupercubedim, y * ysupercubedim,
                                    z * zsupercubedim, cur_res))
                                # updating the index
                                # self.cuboidindex_db.putItem(ch.channel_name, cur_res, x, y, z, ch.time_range[0])
                                # inserting the cube
                                self.s3_io.putCube(ch,
                                                   ch.time_stamp[0],
                                                   morton_index,
                                                   cur_res,
                                                   blosc.pack_array(data),
                                                   neariso=neariso)

                            except Exception as e:
                                # checkpoint the ingest
                                self.logger.error(e)
                                self.checkpoint_ingest(ch.channel_name,
                                                       cur_res, x, y, z, e)
                                raise e

    def uploadNewProject(self, config_file, start_values, neariso=False):
        """Upload a new project"""

        # loading the config file and associated params and processors
        config = Configuration()
        config.load(json.loads(open(config_file, 'rt').read()))
        config.load_plugins()
        path_processor = config.path_processor_class
        path_processor.setup(config.get_path_processor_params())
        tile_processor = config.tile_processor_class
        tile_processor.setup(config.get_tile_processor_params())
        tile_params = config.get_tile_processor_params()
        path_params = config.get_path_processor_params()

        # creating the channel object from resource service
        channel_name = config.config_data['database']['channel']
        channel_datatype = self.info_interface.get_channel_datatype(
            channel_name)
        cur_res = tile_params['ingest_job']['resolution']

        # loading all the parameters for image-sizes, tile-sizes, and iteration limits
        [xsupercubedim, ysupercubedim,
         zsupercubedim] = supercubedim = SUPER_CUBOID_SIZE
        [x_start, x_end] = tile_params['ingest_job']['extent']['x']
        [y_start, y_end] = tile_params['ingest_job']['extent']['y']
        [z_start, z_end] = tile_params['ingest_job']['extent']['z']
        [t_start, t_end] = tile_params['ingest_job']['extent']['t']
        x_tilesz = tile_params['ingest_job']['tile_size']['x']
        y_tilesz = tile_params['ingest_job']['tile_size']['y']
        z_tilesz = tile_params['ingest_job']['tile_size']['z']
        t_tilesz = tile_params['ingest_job']['tile_size']['t']
        x_limit = (x_end - 1) / (x_tilesz) + 1
        y_limit = (y_end - 1) / (y_tilesz) + 1
        z_limit = (z_end - 1) / (z_tilesz) + 1
        t_limit = (t_end - 1) / (t_tilesz) + 1

        if start_values != [0, 0, 0]:
            [x_start, y_start, z_start] = map(div, start_values,
                                              [x_tilesz, y_tilesz, z_tilesz])
        # iterate over t,z,y,x to ingest the data
        for t in range(t_start, t_limit, 1):
            for z in range(z_start, z_limit, zsupercubedim):
                for y in range(y_start, y_limit, 1):
                    for x in range(x_start, x_limit, 1):

                        data = np.zeros([zsupercubedim, y_tilesz, x_tilesz],
                                        dtype=ND_dtypetonp[channel_datatype])
                        for b in range(0, zsupercubedim, 1):
                            if z + b > z_end - 1:
                                break
                            # generate file name
                            file_name = path_processor.process(x, y, z + b, t)
                            # read the file, handle exception if the file is missing
                            try:
                                tile_handle = tile_processor.process(
                                    file_name, x, y, z + b, t)
                                tile_handle.seek(0)
                                data[b, :, :] = np.asarray(
                                    Image.open(tile_handle))
                            except IOError as e:
                                pass
                                # print "missing file", file_name
                        # iterate over the tile if it is larger than the supercuboid size
                        for y_index in range(0, y_tilesz / ysupercubedim):
                            for x_index in range(0, x_tilesz / xsupercubedim):
                                # calculate the morton index
                                insert_data = data[:, y_index *
                                                   ysupercubedim:(y_index +
                                                                  1) *
                                                   ysupercubedim, x_index *
                                                   xsupercubedim:(x_index +
                                                                  1) *
                                                   xsupercubedim]
                                if np.any(insert_data):
                                    morton_index = XYZMorton([
                                        x_index +
                                        (x * x_tilesz / xsupercubedim),
                                        y_index +
                                        (y * y_tilesz / ysupercubedim),
                                        z / zsupercubedim
                                    ])
                                    [s3_x, s3_y,
                                     s3_z] = MortonXYZ(morton_index)
                                    print "Morton Index {}".format(
                                        morton_index)
                                    self.logger.info("[{},{},{}]".format(
                                        (x_index + x) * x_tilesz,
                                        (y_index + y) * y_tilesz, z))
                                    self.cuboidindex_db.putItem(
                                        channel_name,
                                        cur_res,
                                        s3_x,
                                        s3_y,
                                        s3_z,
                                        t,
                                        neariso=neariso)
                                    self.cuboid_bucket.putObject(
                                        channel_name,
                                        cur_res,
                                        morton_index,
                                        t,
                                        blosc.pack_array(insert_data),
                                        neariso=neariso)
                                    # self.s3_io.putCube(ch, t, morton_index, cur_res, blosc.pack_array(insert_data), update=False, neariso=False)

    def checkpoint_ingest(self, channel_name, resolution, x, y, z, e, time=0):
        """Checkpoint the progress to file"""

        with closing(open('checkpoint_ingest.csv', 'wb')) as csv_file:
            field_names = [
                'project_name', 'channel_name', 'resolution', 'x', 'y', 'z',
                'time', 'exception'
            ]
            csv_writer = csv.DictWriter(csv_file,
                                        delimiter=',',
                                        fieldnames=field_names)
            csv_writer.writeheader()
            csv_writer.writerow({
                'project_name': self.proj.project_name,
                'channel_name': channel_name,
                'resolution': resolution,
                'x': x,
                'y': y,
                'z': z,
                'time': time,
                'exception': e.message
            })

    def load_checkpoint(self):
        """Load from a checkpoint file"""
        return NotImplemented
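A hypothetical driver for AwsInterface, assuming an ingest config JSON in the format uploadNewProject consumes (the token and config path are placeholders):

    aws = AwsInterface('example_token')
    aws.uploadNewProject('ingest_config.json', start_values=[0, 0, 0], neariso=False)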
Example #17
 def __init__(self, dataset_name, project_name, host_name=HOST_NAME):
   # self.resource_interface = ResourceInterface(dataset_name, project_name, host_name)
   self.cuboid_bucket = CuboidBucket(project_name)
   self.cuboidindex_db = CuboidIndexDB(project_name)
Example #18
from nduser.models import Project
from nduser.models import Dataset
from nduser.models import Token
from nduser.models import Channel
from ndproj.nddataset import NDDataset
from ndproj.ndproject import NDProject
from ndproj.ndchannel import NDChannel
from ndproj.ndtoken import NDToken
from ndlib.ndtype import *
from test_settings import *
if KV_ENGINE == REDIS:
  from ndingest.nddynamo.cuboidindexdb import CuboidIndexDB
  from ndingest.ndbucket.cuboidbucket import CuboidBucket
  try:
    CuboidIndexDB.deleteTable()
    CuboidBucket.deleteBucket()
  except Exception as e:
    pass

def createTestDB(project_name, channel_list=['unit_anno'], channel_type=ANNOTATION,
                 channel_datatype=UINT32, public=PUBLIC_TRUE, ximagesize=10000,
                 yimagesize=10000, zimagesize=1000, xvoxelres=4.0, yvoxelres=4.0,
                 zvoxelres=3.0, scalingoption=ZSLICES, scalinglevels=5,
                 readonly=READONLY_FALSE, propagate=NOT_PROPAGATED, window=[0,0],
                 time=[0,15], default=False, nd_version=ND_VERSION,
                 token_name='unittest', user='******', dataset_name="unittest",
                 base_resolution=0):
  """Create a unit test database on the specified site and name"""
  
  # setting s3backend to true if Redis and creating s3 bucket and dynamo table
  if KV_ENGINE == REDIS:
    s3backend = S3_TRUE
    CuboidIndexDB.createTable()
    CuboidBucket.createBucket()    
  else:
    s3backend = S3_FALSE

  unituser = User.objects.get(username=user)
Example #19
class AwsInterface:

  def __init__(self, token_name, host_name=HOST_NAME):
    """Create the bucket and intialize values"""
  
    # configuring the logger based on the dataset we are uploading
    self.logger = logging.getLogger(token_name)
    self.logger.setLevel(logging.INFO)
    fh = logging.FileHandler('{}.log'.format(token_name))
    self.logger.addHandler(fh)
    # setting up the project metadata
    self.info_interface = InfoInterface(host_name, token_name)
    # creating the resource interface to the remote server
    # self.resource_interface = ResourceInterface(self.info_interface.dataset_name, self.info_interface.project_name, host_name, logger=self.logger)
    # self.proj = self.resource_interface.getProject()
    # create the s3 I/O and index objects
    self.cuboidindex_db = CuboidIndexDB(self.info_interface.project_name)
    self.cuboid_bucket = CuboidBucket(self.info_interface.project_name)
  

  
  # def setupNewProject(self):
    # """Setup a new project if it does not exist"""
    
    # self.resource_interface.createDataset()
    # self.resource_interface.createProject()
    # self.resource_interface.createToken()
  

  def uploadExistingProject(self, channel_name, resolution, start_values, neariso=False):
    """Upload an existing project to S3"""
      
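    # NOTE: setupNewProject, self.proj, and self.s3_io refer to the
    # resource-interface code commented out above; as written, this
    # method raises AttributeError until those are restored.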
    self.setupNewProject()
    db = SpatialDB(self.proj)
    # checking for channels
    if channel_name is None:
      channel_list = None
    else:
      channel_list = [channel_name]
    
    # iterating over channels in a project
    for ch in self.proj.projectChannels(channel_list):
      
      # creating the channel resource
      self.resource_interface.createChannel(ch.channel_name)
      # ingest 1 or more resolutions based on user input
      if resolution is None:
        start_res = self.proj.datasetcfg.scalinglevels
        stop_res = ch.resolution - 1
      else:
        start_res = resolution
        stop_res = resolution - 1
      
      # iterating over resolution
      for cur_res in range(start_res, stop_res, -1):
        
        # get the source database sizes
        [image_size, time_range] = self.proj.datasetcfg.dataset_dim(cur_res)
        [xcubedim, ycubedim, zcubedim] = cubedim = self.proj.datasetcfg.get_cubedim(cur_res)
        offset = self.proj.datasetcfg.get_offset(cur_res)
        [xsupercubedim, ysupercubedim, zsupercubedim] = supercubedim = self.proj.datasetcfg.get_supercubedim(cur_res)
        # set the limits for iteration on the number of cubes in each dimension
        xlimit = (image_size[0]-1) / (xsupercubedim) + 1
        ylimit = (image_size[1]-1) / (ysupercubedim) + 1
        zlimit = (image_size[2]-1) / (zsupercubedim) + 1
        # [xlimit, ylimit, zlimit] = limit = self.proj.datasetcfg.get_supercube_limit(cur_res)
        [x_start, y_start, z_start] = map(div, start_values, supercubedim)
        for z in range(z_start, zlimit, 1):
          for y in range(y_start, ylimit, 1):
            for x in range(x_start, xlimit, 1):

              try:
                # cutout the data at the current resolution
                data = db.cutout(ch, [x*xsupercubedim, y*ysupercubedim, z*zsupercubedim], [xsupercubedim, ysupercubedim, zsupercubedim], cur_res).data
                # generate the morton index
                morton_index = XYZMorton([x, y, z])

                self.logger.info("[{},{},{}] at res {}".format(x*xsupercubedim, y*ysupercubedim, z*zsupercubedim, cur_res))
                # updating the index
                # self.cuboidindex_db.putItem(ch.channel_name, cur_res, x, y, z, ch.time_range[0])
                # inserting the cube
                self.s3_io.putCube(ch, ch.time_stamp[0], morton_index, cur_res, blosc.pack_array(data), neariso=neariso)
              
              except Exception as e:
                # checkpoint the ingest
                self.logger.error(e)
                self.checkpoint_ingest(ch.channel_name, cur_res, x, y, z, e)
                raise e
  
  
  def uploadNewProject(self, config_file, start_values, neariso=False):
    """Upload a new project"""
    
    # loading the config file and associated params and processors
    config = Configuration()
    config.load(json.loads(open(config_file, 'rt').read()))
    config.load_plugins()
    path_processor = config.path_processor_class
    path_processor.setup(config.get_path_processor_params())
    tile_processor = config.tile_processor_class
    tile_processor.setup(config.get_tile_processor_params())
    tile_params = config.get_tile_processor_params()
    path_params = config.get_path_processor_params()
    
    # creating the channel object from resource service
    channel_name = config.config_data['database']['channel']
    channel_datatype = self.info_interface.get_channel_datatype(channel_name)
    cur_res = tile_params['ingest_job']['resolution']
    
    # loading all the parameters for image-sizes, tile-sizes, and iteration limits
    [xsupercubedim, ysupercubedim, zsupercubedim] = supercubedim = SUPER_CUBOID_SIZE
    [x_start, x_end] = tile_params['ingest_job']['extent']['x']
    [y_start, y_end] = tile_params['ingest_job']['extent']['y']
    [z_start, z_end] = tile_params['ingest_job']['extent']['z']
    [t_start, t_end] = tile_params['ingest_job']['extent']['t']
    x_tilesz = tile_params['ingest_job']['tile_size']['x']
    y_tilesz = tile_params['ingest_job']['tile_size']['y']
    z_tilesz = tile_params['ingest_job']['tile_size']['z']
    t_tilesz = tile_params['ingest_job']['tile_size']['t']
    x_limit = (x_end-1) / (x_tilesz) + 1
    y_limit = (y_end-1) / (y_tilesz) + 1
    z_limit = (z_end-1) / (z_tilesz) + 1
    t_limit = (t_end-1) / (t_tilesz) + 1
    
    if start_values != [0, 0, 0]:
      [x_start, y_start, z_start] = map(div, start_values, [x_tilesz, y_tilesz, z_tilesz])
    # iterate over t,z,y,x to ingest the data
    for t in range(t_start, t_limit, 1):  
      for z in range(z_start, z_limit, zsupercubedim):
        for y in range(y_start, y_limit, 1):
          for x in range(x_start, x_limit, 1):
            
            data = np.zeros([zsupercubedim, y_tilesz, x_tilesz], dtype=ND_dtypetonp[channel_datatype])
            for b in range(0, zsupercubedim, 1):
              if z + b > z_end - 1:
                break
              # generate file name
              file_name = path_processor.process(x, y, z+b, t)
              # read the file, handle exception if the file is missing
              try:
                tile_handle = tile_processor.process(file_name, x, y, z+b, t)
                tile_handle.seek(0)
                data[b,:,:] = np.asarray(Image.open(tile_handle))
              except IOError as e:
                pass
                # print "missing file", file_name
            # iterate over the tile if it is larger than the supercuboid size
            for y_index in range(0, y_tilesz/ysupercubedim):
              for x_index in range(0, x_tilesz/xsupercubedim):
                # calculate the morton index 
                insert_data = data[:, y_index*ysupercubedim:(y_index+1)*ysupercubedim, x_index*xsupercubedim:(x_index+1)*xsupercubedim]
                if np.any(insert_data):
                  morton_index = XYZMorton([x_index+(x*x_tilesz/xsupercubedim), y_index+(y*y_tilesz/ysupercubedim), z/zsupercubedim])
                  [s3_x, s3_y, s3_z] = MortonXYZ(morton_index)
                  print "Morton Index {}".format(morton_index)
                  self.logger.info("[{},{},{}]".format((x_index+x)*x_tilesz, (y_index+y)*y_tilesz, z))
                  self.cuboidindex_db.putItem(channel_name, cur_res, s3_x, s3_y, s3_z, t, neariso=neariso)
                  self.cuboid_bucket.putObject(channel_name, cur_res, morton_index, t, blosc.pack_array(insert_data), neariso=neariso)
                  # self.s3_io.putCube(ch, t, morton_index, cur_res, blosc.pack_array(insert_data), update=False, neariso=False)


  def checkpoint_ingest(self, channel_name, resolution, x, y, z, e, time=0):
    """Checkpoint the progress to file"""
    
    with closing(open('checkpoint_ingest.csv', 'wb')) as csv_file:
      field_names = ['project_name', 'channel_name', 'resolution', 'x', 'y', 'z', 'time', 'exception']
      csv_writer = csv.DictWriter(csv_file, delimiter=',', fieldnames=field_names)
      csv_writer.writeheader()
      csv_writer.writerow({'project_name' : self.proj.project_name, 'channel_name' : channel_name, 'resolution' : resolution, 'x' : x, 'y' : y, 'z' : z, 'time' : time, 'exception' : e.message})

  
  def load_checkpoint(self):
    """Load from a checkpoint file"""
    return NotImplemented
Example #20
class S3ProjectDB:
    """Database for the projects"""
    def __init__(self, pr):
        """Create the database connection"""
        self.pr = pr
        # create connections for cuboid bucket and cuboid dynamo table
        self.cuboid_bucket = CuboidBucket(
            self.pr.project_name, endpoint_url=ndingest_settings.S3_ENDPOINT)
        self.cuboidindex_db = CuboidIndexDB(
            self.pr.project_name,
            endpoint_url=ndingest_settings.DYNAMO_ENDPOINT)

    def __del__(self):
        """Close the database connection"""
        self.close()

    def close(self):
        """Close the database connection"""
        pass

    def newNDProject(self):
        """Create the database for a project."""
        pass

    def newNDChannel(self, channel_name):
        """Create the tables for a channel."""
        pass

    def deleteNDProject(self):
        """Delete a project in s3 and dyanmo"""

        try:
            for item in self.cuboidindex_db.queryProjectItems():
                self.cuboid_bucket.deleteObject(item['supercuboid_key'])
                self.cuboidindex_db.deleteItem(item['supercuboid_key'])
        except botocore.exceptions.ClientError as e:
            if e.response['Error']['Code'] == 'ResourceNotFoundException':
                logger.warning("Resource was not accessible {}".format(e))
                pass
            else:
                raise e
        except Exception as e:
            logger.error("Error in deleting S3 project {}. {}".format(
                self.pr.project_name, e))
            raise NDWSError("Error in deleting S3 project {}. {}".format(
                self.pr.project_name, e))

    def deleteNDChannel(self, channel_name):
        """Delete a channel in s3 and dynamo"""

        try:
            for item in self.cuboidindex_db.queryChannelItems(channel_name):
                self.cuboid_bucket.deleteObject(item['supercuboid_key'])
                self.cuboidindex_db.deleteItem(item['supercuboid_key'])
        except botocore.exceptions.ClientError as e:
            if e.response['Error']['Code'] == 'ResourceNotFoundException':
                logger.warning("Resource was not accessible {}".format(e))
                pass
            else:
                raise e
        except Exception as e:
            logger.error("Error in deleting S3 channel {}. {}".format(
                channel_name, e))
            raise NDWSError("Error in deleting S3 channel {}. {}".format(
                channel_name, e))

    def deleteNDResolution(self, channel_name, resolution):
        """Delete the resolution in s3 and dynamo"""

        try:
            for item in self.cuboidindex_db.queryResolutionItems(
                    channel_name, resolution):
                self.cuboid_bucket.deleteObject(item['supercuboid_key'])
                self.cuboidindex_db.deleteItem(item['supercuboid_key'])
        except botocore.exceptions.ClientError as e:
            if e.response['Error']['Code'] == 'ResourceNotFoundException':
                logger.warning("Resource was not accessible {}".format(e))
                pass
            else:
                raise e
        except Exception as e:
            logger.error(
                "Error in deleting S3 channel resolution {},{}. {}".format(
                    channel_name, resolution, e))
            raise NDWSError(
                "Error in deleting S3 channel resolution {},{}. {}".format(
                    channel_name, resolution, e))
Example #21
# Copyright 2014 NeuroData (http://neurodata.io)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import sys

sys.path += [os.path.abspath('../django')]
import ND.settings

os.environ['DJANGO_SETTINGS_MODULE'] = 'ND.settings'
from ndingest.nddynamo.cuboidindexdb import CuboidIndexDB
from ndingest.ndbucket.cuboidbucket import CuboidBucket

# set up the cuboid index table and bucket locally
CuboidIndexDB.createTable()
CuboidBucket.createBucket()
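A symmetric teardown for this bootstrap script, assuming the same imports, would delete what it created:

    # Tear down the local cuboid index table and bucket created above.
    CuboidIndexDB.deleteTable()
    CuboidBucket.deleteBucket()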
Example #22
 def __init__(self, pr):
   """Create the database connection"""
   self.pr = pr
   # create connections for cuboid bucket and cuboid dynamo table
   self.cuboid_bucket = CuboidBucket(self.pr.project_name, endpoint_url=ndingest_settings.S3_ENDPOINT)
   self.cuboidindex_db = CuboidIndexDB(self.pr.project_name, endpoint_url=ndingest_settings.DYNAMO_ENDPOINT)
Example #23
def createTestDB(project_name,
                 channel_list=['unit_anno'],
                 channel_type=ANNOTATION,
                 channel_datatype=UINT32,
                 public=PUBLIC_TRUE,
                 ximagesize=10000,
                 yimagesize=10000,
                 zimagesize=1000,
                 xvoxelres=4.0,
                 yvoxelres=4.0,
                 zvoxelres=3.0,
                 scalingoption=ZSLICES,
                 scalinglevels=5,
                 readonly=READONLY_FALSE,
                 propagate=NOT_PROPAGATED,
                 window=[0, 0],
                 time=[0, 15],
                 default=False,
                 nd_version=ND_VERSION,
                 token_name='unittest',
                 user='******',
                 dataset_name="unittest",
                 base_resolution=0):
    """Create a unit test data base on the specified sit and name"""

    # setting s3backend to true if Redis and creating s3 bucket and dynamo table
    if KV_ENGINE == REDIS:
        s3backend = S3_TRUE
        CuboidIndexDB.createTable()
        CuboidBucket.createBucket()
    else:
        s3backend = S3_FALSE

    unituser = User.objects.get(username=user)

    ds = NDDataset(
        Dataset(dataset_name=dataset_name,
                user=unituser,
                ximagesize=ximagesize,
                yimagesize=yimagesize,
                zimagesize=zimagesize,
                xoffset=0,
                yoffset=0,
                zoffset=1,
                xvoxelres=xvoxelres,
                yvoxelres=yvoxelres,
                zvoxelres=zvoxelres,
                scalingoption=scalingoption,
                scalinglevels=scalinglevels,
                public=PUBLIC_TRUE,
                dataset_description="Unit test"))
    ds.create()

    # make the project entry
    pr = NDProject(
        Project(project_name=project_name,
                project_description='Unit test',
                user=unituser,
                dataset=ds._ds,
                nd_version=nd_version,
                host='localhost',
                kvengine=KV_ENGINE,
                kvserver=KV_SERVER,
                s3backend=s3backend))
    pr.create()

    # create a token
    tk = NDToken(
        Token(token_name=token_name,
              user=unituser,
              token_description='Unit test token',
              project_id=pr.project_name,
              public=public))
    tk.create()

    # get the correct object for the kvengine
    # pd = NDProjectsDB.getProjDB(NDProjectpr)
    # create the database
    # pd.newNDProject()

    try:
        for channel_name in channel_list:
            ch = NDChannel(
                Channel(channel_name=channel_name,
                        channel_type=channel_type,
                        channel_datatype=channel_datatype,
                        channel_description='Unit test channel',
                        project_id=pr.project_name,
                        readonly=readonly,
                        propagate=propagate,
                        resolution=base_resolution,
                        exceptions=1,
                        starttime=time[0],
                        endtime=time[1],
                        startwindow=window[0],
                        endwindow=window[1],
                        default=default))
            # create a channel
            ch.create()
            # create the channel table
            # pd.newNDChannel(ch.channel_name)
    except Exception as e:
        print(e)
        raise e
Example #24
from nduser.models import Project
from nduser.models import Dataset
from nduser.models import Token
from nduser.models import Channel
from ndproj.nddataset import NDDataset
from ndproj.ndproject import NDProject
from ndproj.ndchannel import NDChannel
from ndproj.ndtoken import NDToken
from ndlib.ndtype import *
from test_settings import *
if KV_ENGINE == REDIS:
    from ndingest.nddynamo.cuboidindexdb import CuboidIndexDB
    from ndingest.ndbucket.cuboidbucket import CuboidBucket
    try:
        CuboidIndexDB.deleteTable()
        CuboidBucket.deleteBucket()
    except Exception as e:
        pass


def createTestDB(project_name,
                 channel_list=['unit_anno'],
                 channel_type=ANNOTATION,
                 channel_datatype=UINT32,
                 public=PUBLIC_TRUE,
                 ximagesize=10000,
                 yimagesize=10000,
                 zimagesize=1000,
                 xvoxelres=4.0,
                 yvoxelres=4.0,
                 zvoxelres=3.0,
Example #25
class Test_IngestLambda:
    def setup_class(self):
        """Setup class parameters"""

        # create the tile index table. skip if it exists
        try:
            TileIndexDB.createTable(endpoint_url="http://localhost:8000")
            CuboidIndexDB.createTable(endpoint_url="http://localhost:8000")
        except Exception as e:
            pass
        self.tileindex_db = TileIndexDB(nd_proj.project_name,
                                        endpoint_url="http://localhost:8000")

        # create the tile bucket
        TileBucket.createBucket(endpoint_url="http://localhost:4567")
        self.tile_bucket = TileBucket(nd_proj.project_name,
                                      endpoint_url="http://localhost:4567")
        self.tiles = [self.x_tile, self.y_tile, self.z_tile] = [0, 0, 0]

        message_id = "testing"
        receipt_handle = "123456"
        # insert SUPER_CUBOID_SIZE tiles in the bucket
        for z_index in range(self.z_tile, settings.SUPER_CUBOID_SIZE[2], 1):
            tile_handle = cStringIO.StringIO()
            self.tile_bucket.putObject(
                tile_handle,
                nd_proj.channel_name,
                nd_proj.resolution,
                self.x_tile,
                self.y_tile,
                z_index,
                message_id,
                receipt_handle,
            )

        # creating the cuboid bucket
        CuboidBucket.createBucket(endpoint_url="http://localhost:4567")
        self.cuboid_bucket = CuboidBucket(nd_proj.project_name,
                                          endpoint_url="http://localhost:4567")

        # create the ingest queue
        IngestQueue.createQueue(nd_proj, endpoint_url="http://localhost:4568")
        self.ingest_queue = IngestQueue(nd_proj,
                                        endpoint_url="http://localhost:4568")

        # send message to the ingest queue
        morton_index = XYZMorton(self.tiles)
        supercuboid_key = self.cuboid_bucket.generateSupercuboidKey(
            nd_proj.channel_name, nd_proj.resolution, morton_index)
        response = self.ingest_queue.sendMessage(supercuboid_key)

        # create the cleanup queue
        CleanupQueue.createQueue(nd_proj, endpoint_url="http://localhost:4568")

    def teardown_class(self):
        """Teardown class parameters"""

        # cleanup tilebucket
        for z_index in range(self.z_tile, settings.SUPER_CUBOID_SIZE[2], 1):
            tile_key = self.tile_bucket.encodeObjectKey(
                nd_proj.channel_name,
                nd_proj.resolution,
                self.x_tile,
                self.y_tile,
                z_index,
            )
            self.tile_bucket.deleteObject(tile_key)

        morton_index = XYZMorton(self.tiles)
        supercuboid_key = self.cuboid_bucket.generateSupercuboidKey(
            nd_proj.channel_name, nd_proj.resolution, morton_index)
        self.cuboid_bucket.deleteObject(supercuboid_key)
        # delete created entities
        TileIndexDB.deleteTable(endpoint_url="http://localhost:8000")
        CuboidIndexDB.deleteTable(endpoint_url="http://localhost:8000")
        IngestQueue.deleteQueue(nd_proj, endpoint_url="http://localhost:4568")
        CleanupQueue.deleteQueue(nd_proj, endpoint_url="http://localhost:4568")
        TileBucket.deleteBucket(endpoint_url="http://localhost:4567")
        try:
            CuboidBucket.deleteBucket(endpoint_url="http://localhost:4567")
        except Exception:
            # ignore if the bucket is already gone
            pass

    def test_Uploadevent(self):
        """Testing the event"""
        # creating an emulambda function
        func = emulambda.import_lambda("ingestlambda.lambda_handler")
        # creating an emulambda event
        event = emulambda.parse_event(
            open("../ndlambda/functions/ingest/ingest_event.json").read())
        # calling the emulambda function to invoke a lambda
        emulambda.invoke_lambda(func, event, None, 0, None)

        # testing if the supercuboid was inserted in the bucket
        morton_index = XYZMorton(self.tiles)
        cuboid = self.cuboid_bucket.getObject(nd_proj.channel_name,
                                              nd_proj.resolution, morton_index)

        # testing if the message was removed from the ingest queue
        for message in self.ingest_queue.receiveMessage():
            # KL TODO write the message id into the JSON event file directly
            print(message)
Example #26
class Test_Cuboid_Bucket:
    def setup_class(self):
        """Setup Parameters"""
        if "S3_ENDPOINT" in dir(settings):
            self.endpoint_url = settings.S3_ENDPOINT
        else:
            self.endpoint_url = None
        CuboidBucket.createBucket(endpoint_url=self.endpoint_url)
        self.cuboid_bucket = CuboidBucket(
            nd_proj.project_name, endpoint_url=self.endpoint_url
        )

    def teardown_class(self):
        """Teardown Parameters"""

        # Ensure bucket empty before deleting.
        for objs in self.cuboid_bucket.getAllObjects():
            self.cuboid_bucket.deleteObject(objs.key)

        CuboidBucket.deleteBucket(endpoint_url=self.endpoint_url)

    @pytest.mark.skipif(
        settings.PROJECT_NAME == "Boss", reason="putObject() not supported by the Boss"
    )
    def test_put_object(self):
        """Testing put object"""
        cube_data = blosc.pack_array(np.zeros(settings.SUPER_CUBOID_SIZE))
        for morton_index in range(0, 10, 1):
            self.cuboid_bucket.putObject(
                nd_proj.channel_name, nd_proj.resolution, morton_index, cube_data
            )

        for morton_index in range(0, 10, 1):
            supercuboid_key = self.cuboid_bucket.generateSupercuboidKey(
                nd_proj.channel_name, nd_proj.resolution, morton_index
            )
            self.cuboid_bucket.deleteObject(supercuboid_key)

    def test_put_object_by_key(self):
        hashm = hashlib.md5()
        hashm.update(b"test_cuboidbucket_data")
        cube_data = blosc.pack_array(np.zeros(settings.SUPER_CUBOID_SIZE))

        for morton_index in range(0, 10, 1):
            key = "{}&{}".format(hashm.hexdigest(), morton_index)
            self.cuboid_bucket.putObjectByKey(key, cube_data)

        for morton_index in range(0, 10, 1):
            key = "{}&{}".format(hashm.hexdigest(), morton_index)
            self.cuboid_bucket.deleteObject(key)
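The tests above pack cuboids with blosc before uploading them; a minimal round-trip sketch of that encoding:

    import blosc
    import numpy as np

    cube = np.zeros((64, 512, 512), dtype=np.uint32)
    packed = blosc.pack_array(cube)   # compressed bytes, as stored in the bucket
    assert np.array_equal(blosc.unpack_array(packed), cube)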