Code Example #1
    def send_upload_task_message(self, msg):
        """
        Upload one message to the upload queue.
        (Note: currently unused; replaced by send_upload_message_batch.)
        Args:
            msg: Message to send to the upload queue

        Returns:
            None

        """
        queue = UploadQueue(self.nd_proj, endpoint_url=None)
        queue.sendMessage(msg)
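
The docstring notes that this method was superseded by send_upload_message_batch, which is not shown here. A minimal sketch of what such a batch sender could look like, assuming only the UploadQueue.sendMessage call demonstrated above (the method name comes from the docstring; its signature is an assumption):

    def send_upload_message_batch(self, msgs):
        """Hypothetical sketch of the batch counterpart named above.

        Uses only the sendMessage call shown in this example; the real
        method may instead group messages into SQS batch requests.
        """
        queue = UploadQueue(self.nd_proj, endpoint_url=None)
        for msg in msgs:
            queue.sendMessage(msg)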
Code Example #2
File: ndworker.py  Project: aplbrain/ndingest
  def populateQueue(self, tile_size=1024, time_interval=1):
    """Populate the message queue"""
    
    # setup the queue
    queue_name = UploadQueue.createQueue([self.proj.getProjectName(), self.channel, str(self.resolution)])
    upload_queue = UploadQueue([self.proj.getProjectName(), self.channel, str(self.resolution)])
    
    # load the image sizes
    [[ximage_size, yimage_size, zimage_size],(start_time, end_time)] = self.proj.datasetcfg.imageSize(self.resolution)
    # load the image offsets
    [x_offset, y_offset, z_offset] = self.proj.datasetcfg.getOffset()[self.resolution]

    # calculate the number of whole tiles (integer division)
    # TODO KL account for xoffset and yoffset here
    num_xtiles = ximage_size // tile_size
    num_ytiles = yimage_size // tile_size
    
    # iterate over time
    for time in range(start_time, end_time+1, time_interval):
    
      # iterate over the x and y range
      for ytile in range(0, num_ytiles, 1):
        for xtile in range(0, num_xtiles, 1):
        
          # iterate over zrange
          for ztile in range(z_offset, zimage_size, 1):
            
            time_range = None if end_time - start_time == 0 else [time, time_interval]
            # generate a message for each one
            print("inserting message:x{}y{}z{}".format(xtile, ytile, ztile))
            message = UploadMessage.encode(self.proj.getProjectName(), self.channel, self.resolution, xtile, ytile, ztile, time_range)
            response = upload_queue.sendMessage(message)
            print(response)

    return queue_name
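
The TODO above leaves the x and y offsets out of the tile math. A minimal sketch of the offset-aware count, assuming tile grids start at the dataset offset as in generateUploadTasks from ingestmanager.py (Code Example #5 below); the helper name is hypothetical:

def tile_counts(image_size, offset, tile_size):
    """Whole tiles along one axis, starting at the dataset offset.

    Hypothetical helper mirroring the (size - offset) // tile arithmetic
    in Code Example #5; partial tiles at the far edge are dropped.
    """
    return (image_size - offset) // tile_size

# e.g. a 4096-pixel axis with a 1024-pixel offset and 1024-pixel tiles
assert tile_counts(4096, 1024, 1024) == 3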
Code Example #3
def test_message(sqs):
    """Test put, get and delete Message"""

    x_tile = 0
    y_tile = 0

    proj = generate_proj()

    from ndingest.ndqueue.uploadqueue import UploadQueue

    UploadQueue.createQueue(proj)
    upload_queue = UploadQueue(proj)

    for z_tile in range(0, 2, 1):
        # encode the message
        message = serializer.encodeUploadMessage(
            proj.project_name,
            proj.channel_name,
            proj.resolution,
            x_tile,
            y_tile,
            z_tile,
        )
        # send message to the queue
        upload_queue.sendMessage(message)

    # receive message from the queue
    for message_id, receipt_handle, message_body in upload_queue.receiveMessage(
        number_of_messages=3
    ):
        # check if we get the tile_info back correctly
        assert message_body["z_tile"] in [0, 1]
        # delete message from the queue
        response = upload_queue.deleteMessage(message_id, receipt_handle)
        # check if the message was successfully deleted
        assert "Successful" in response
Code Example #4
class Test_UploadLambda:
    def setup_class(self):
        """Setup class parameters"""
        # create the tile index table. skip if it exists
        try:
            TileIndexDB.createTable(endpoint_url=settings.DYNAMO_ENDPOINT)
        except Exception:
            pass
        self.tileindex_db = TileIndexDB(nd_proj.project_name,
                                        endpoint_url=settings.DYNAMO_ENDPOINT)
        # create the ingest queue
        IngestQueue.createQueue(nd_proj, endpoint_url=settings.SQS_ENDPOINT)
        # create the upload queue
        UploadQueue.createQueue(nd_proj, endpoint_url=settings.SQS_ENDPOINT)
        self.upload_queue = UploadQueue(nd_proj,
                                        endpoint_url=settings.SQS_ENDPOINT)
        tile_bucket = TileBucket(nd_proj.project_name,
                                 endpoint_url=settings.S3_ENDPOINT)
        [self.x_tile, self.y_tile, self.z_tile] = [0, 0, 0]
        message = serializer.encodeUploadMessage(
            nd_proj.project_name,
            nd_proj.channel_name,
            nd_proj.resolution,
            self.x_tile,
            self.y_tile,
            self.z_tile,
        )
        # insert message in the upload queue
        self.upload_queue.sendMessage(message)
        # receive message and upload object
        for (
                message_id,
                receipt_handle,
                message_body,
        ) in self.upload_queue.receiveMessage():
            tile_handle = io.BytesIO()
            tile_bucket.putObject(
                tile_handle,
                nd_proj.channel_name,
                nd_proj.resolution,
                self.x_tile,
                self.y_tile,
                self.z_tile,
                message_id,
                receipt_handle,
            )

    def teardown_class(self):
        """Teardown class parameters"""
        TileIndexDB.deleteTable(endpoint_url=settings.DYNAMO_ENDPOINT)
        IngestQueue.deleteQueue(nd_proj, endpoint_url=settings.SQS_ENDPOINT)
        UploadQueue.deleteQueue(nd_proj, endpoint_url=settings.SQS_ENDPOINT)

    def test_Uploadevent(self):
        """Test the upload lambda end to end with an emulated event"""
        # creating an emulambda function
        func = emulambda.import_lambda("uploadlambda.lambda_handler")
        # creating an emulambda event
        event = emulambda.parse_event(
            open("../ndlambda/functions/upload/upload_event.json").read())
        # calling the emulambda function to invoke a lambda
        emulambda.invoke_lambda(func, event, None, 0, None)

        # testing if the index was updated in tileindexdb
        supercuboid_key = self.tileindex_db.generatePrimaryKey(
            nd_proj.channel_name,
            nd_proj.resolution,
            self.x_tile,
            self.y_tile,
            self.z_tile,
        )
        item = self.tileindex_db.getItem(supercuboid_key)
        assert item["zindex_list"] == set([0])

        # testing if the message was deleted from the upload queue or not
        for message in self.upload_queue.receiveMessage():
            assert False
Code Example #5
File: ingestmanager.py  Project: neurodata/ndstore
class IngestManager(object):
    def __init__(self):
        # self.ds = None
        # self.pr = None
        # self.ch = None
        self.nd_proj = None
        self.ingest_job = None

    def createIngestJob(self, user_id, config_data):
        """Create an ingest job based on the posted config data"""

        config_data = json.loads(config_data)
        # validate schema
        if self.validateConfig(config_data):
            try:
                # create the upload queue
                UploadQueue.createQueue(
                    self.nd_proj, endpoint_url=ndingest_settings.SQS_ENDPOINT)
                self.upload_queue = UploadQueue(
                    self.nd_proj, endpoint_url=ndingest_settings.SQS_ENDPOINT)
                self.ingest_job.upload_queue = self.upload_queue.url
                # create the ingest queue
                IngestQueue.createQueue(
                    self.nd_proj, endpoint_url=ndingest_settings.SQS_ENDPOINT)
                self.ingest_job.ingest_queue = IngestQueue(
                    self.nd_proj,
                    endpoint_url=ndingest_settings.SQS_ENDPOINT).url
                # create the cleanup queue
                CleanupQueue.createQueue(
                    self.nd_proj, endpoint_url=ndingest_settings.SQS_ENDPOINT)
                self.ingest_job.cleanup_queue = CleanupQueue(
                    self.nd_proj,
                    endpoint_url=ndingest_settings.SQS_ENDPOINT).url
                self.generateUploadTasks()
                self.ingest_job.user_id = user_id
                self.ingest_job.save()
                return NDIngestJob.serialize(self.ingest_job._job)

            except Exception as e:
                print(e)
                raise NDWSError(e)

    def validateConfig(self, config_data):
        try:
            ndcg = Configuration(config_data)
            validator = ndcg.get_validator()
            validator.schema = ndcg.schema
            validator.validate_schema()
            ingest_job_json = json.dumps({
                'dataset': ndcg.config_data["database"]["dataset"],
                'project': ndcg.config_data["database"]["project"],
                'channel': ndcg.config_data["database"]["channel"],
                'resolution': ndcg.config_data["ingest_job"]["resolution"],
                'x_start': ndcg.config_data["ingest_job"]["extent"]["x"][0],
                'x_stop': ndcg.config_data["ingest_job"]["extent"]["x"][1],
                'y_start': ndcg.config_data["ingest_job"]["extent"]["y"][0],
                'y_stop': ndcg.config_data["ingest_job"]["extent"]["y"][1],
                'z_start': ndcg.config_data["ingest_job"]["extent"]["z"][0],
                'z_stop': ndcg.config_data["ingest_job"]["extent"]["z"][1],
                't_start': ndcg.config_data["ingest_job"]["extent"]["t"][0],
                't_stop': ndcg.config_data["ingest_job"]["extent"]["t"][1],
                'tile_size_x': ndcg.config_data["ingest_job"]["tile_size"]["x"],
                'tile_size_y': ndcg.config_data["ingest_job"]["tile_size"]["y"],
                'tile_size_z': ndcg.config_data["ingest_job"]["tile_size"]["z"],
                'tile_size_t': ndcg.config_data["ingest_job"]["tile_size"]["t"],
            })
            self.ingest_job = NDIngestJob.fromJson(ingest_job_json)
            self.nd_proj = NDIngestProj(self.ingest_job.project,
                                        self.ingest_job.channel,
                                        self.ingest_job.resolution)
        except jsonschema.ValidationError:
            raise NDWSError("Schema validation failed")
        except Exception:
            raise NDWSError("Properties not found")
        return True

    def getIngestJob(self, job_id):
        """Get an ingest job based on job id"""

        try:
            ingest_job = NDIngestJob.fromId(job_id)
            return NDIngestJob.serialize(ingest_job._job)
        except Exception as e:
            print(e)
            raise

    def deleteIngestJob(self, job_id):
        """Delete an ingest job based on job id"""

        try:
            ingest_job = NDIngestJob.fromId(job_id)
            nd_proj = NDIngestProj(ingest_job.project, ingest_job.channel,
                                   ingest_job.resolution)
            # delete the upload queue
            UploadQueue.deleteQueue(
                nd_proj, endpoint_url=ndingest_settings.SQS_ENDPOINT)
            # delete the ingest queue
            IngestQueue.deleteQueue(
                nd_proj, endpoint_url=ndingest_settings.SQS_ENDPOINT)
            # delete the cleanup queue
            CleanupQueue.deleteQueue(
                nd_proj, endpoint_url=ndingest_settings.SQS_ENDPOINT)
            ingest_job.status = INGEST_STATUS_DELETED
            ingest_job.save()
        except Exception as e:
            print(e)
            raise

    def generateUploadTasks(self):
        """Populate the upload queue with tile names"""

        ds = NDDataset.fromName(self.ingest_job.dataset)
        # get the dataset configuration
        [[ximagesz, yimagesz, zimagesz],
         (starttime, endtime)] = ds.dataset_dim(self.ingest_job.resolution)
        [xoffset, yoffset, zoffset] = ds.get_offset(self.ingest_job.resolution)
        # [xsupercubedim, ysupercubedim, zsupercubedim] = supercubedim = map(mul, cubedim, SUPERCUBESIZE)
        # if ch.channel_type in TIMESERIES_CHANNELS:
        # logger.error("Timeseries data not supported for now. Error in {}".format(self.token))
        # raise NDWSError("Timeseries data not supported for now. Error in {}".format(self.token))

        num_xtiles = (ximagesz - xoffset) // self.ingest_job.tile_size_x
        num_ytiles = (yimagesz - yoffset) // self.ingest_job.tile_size_y
        num_ztiles = (zimagesz - zoffset) // self.ingest_job.tile_size_z

        # over all the tiles in the slice
        for ytile in range(0, num_ytiles, 1):
            for xtile in range(0, num_xtiles, 1):
                for ztile in range(0, num_ztiles, 1):

                    # encode and insert message in queue
                    # print "inserting message:x{}-y{}-z{}".format(xtile, ytile, ztile)
                    message = serializer.encodeUploadMessage(
                        self.nd_proj.project_name, self.nd_proj.channel_name,
                        self.nd_proj.resolution, xtile, ytile, ztile)
                    self.upload_queue.sendMessage(message)
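
The triple loop above enqueues one message per (x, y, z) tile, so the queue depth is the product of the per-axis tile counts. A quick sketch with hypothetical dimensions:

def upload_message_count(image_dims, offsets, tile_dims):
    """Total messages generateUploadTasks would enqueue.

    Hypothetical helper: one message per (x, y, z) tile combination,
    using the same (size - offset) // tile arithmetic as above.
    """
    total = 1
    for size, offset, tile in zip(image_dims, offsets, tile_dims):
        total *= (size - offset) // tile
    return total

# e.g. a 4096 x 4096 x 64 volume, zero offsets, 1024 x 1024 x 16 tiles:
# 4 * 4 * 4 = 64 upload messages
assert upload_message_count((4096, 4096, 64), (0, 0, 0), (1024, 1024, 16)) == 64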