def test_run(self):
        """Consume a volumetric task from the upload queue and verify the cuboid lands in S3."""
        engine = Engine(self.config_file, self.api_token, 23)
        engine.msg_wait_iterations = 0

        # Seed the upload queue with volumetric tasks for the engine to consume.
        self.setup_helper.add_volumetric_tasks(self.aws_creds["access_key"],
                                               self.aws_creds['secret_key'],
                                               self.upload_queue_url,
                                               engine.backend)

        engine.join()
        engine.run()

        # After the run, the cuboid object should exist in the ingest bucket.
        bucket = boto3.resource('s3').Bucket(self.ingest_bucket_name)

        with tempfile.NamedTemporaryFile() as scratch:
            with open(scratch.name, 'wb') as sink:
                bucket.download_fileobj(VOLUMETRIC_CUBOID_KEY, sink)
            with open(scratch.name, 'rb') as source:
                # Using an empty CloudVolume dataset so all values should be 0.
                # dtype set in boss-v0.2-test.json under chunk_processor.params.info.data_type
                cuboid = self.s3_object_to_cuboid(source.read(), 'uint8')
                distinct_values = np.unique(cuboid)
                assert len(distinct_values) == 1
                assert distinct_values[0] == 0
Example #2
0
def worker_process_run(config_file, api_token, job_id, pipe):
    """A worker process main execution function. Generates an engine, and joins the job
       (that was either created by the main process or joined by it).
       Ends when no more tasks are left that can be executed.

    Args:
        config_file(str): the path to the configuration file to initialize the engine with.
        api_token(str): the token to initialize the engine with.
        job_id(int): the id of the job the engine needs to join with.
        pipe(multiprocessing.Pipe): the receiving end of the pipe that communicates with the master process.
    """

    always_log_info("Creating new worker process, pid={}.".format(os.getpid()))
    # Create the engine
    try:
        engine = Engine(config_file, api_token, job_id)
    except ConfigFileError as err:
        # Report to stderr (not stdout) so the failure is visible even when
        # stdout is piped/buffered, then exit non-zero so the parent notices.
        print("ERROR (pid: {}): {}".format(os.getpid(), err), file=sys.stderr)
        sys.exit(1)

    # Join job
    engine.join()

    # Start it up!
    should_run = True
    while should_run:
        try:
            engine.run()
            # run will end if no more jobs are available
            should_run = False
        except KeyboardInterrupt:
            # Make sure they want to stop this client, wait for the main process to send the next step
            should_run = pipe.recv()
    always_log_info("  - Process pid={} finished gracefully.".format(os.getpid()))
Example #3
0
    def test_run(self):
        """Consume upload-queue tasks; verify the tile reaches S3 and the queues update."""
        engine = Engine(self.config_file, self.api_token, 23)
        engine.msg_wait_iterations = 2

        expected_task_count = 4

        # Seed the upload queue with tasks for the engine to consume.
        self.setup_helper.add_tasks(self.aws_creds["access_key"],
                                    self.aws_creds['secret_key'],
                                    self.upload_queue_url, engine.backend)
        sqs = boto3.resource('sqs')
        upload_queue = sqs.Queue(self.upload_queue_url)

        # Sanity-check that seeding produced the expected number of messages.
        self.assertEqual(
            expected_task_count,
            int(upload_queue.attributes['ApproximateNumberOfMessages']))

        engine.join()
        engine.run()

        # The engine should have written the tile object to the tile bucket.
        tile_bucket = boto3.resource('s3').Bucket(self.tile_bucket_name)

        with tempfile.NamedTemporaryFile() as scratch:
            with open(scratch.name, 'wb') as sink:
                tile_bucket.download_fileobj(
                    "03ca58a12ec662954ac12e06517d4269&1&2&3&0&5&6&1&0", sink)

                # A successful download means the key was valid and the object's
                # bytes were streamed into the file handle.
                assert sink.tell() == 182300

        # The upload queue should be fully drained...
        upload_queue.reload()
        self.assertEqual(
            0, int(upload_queue.attributes['ApproximateNumberOfMessages']))
        self.assertEqual(
            0,
            int(upload_queue.
                attributes['ApproximateNumberOfMessagesNotVisible']))

        # ...and every task forwarded to the tile index queue.
        tile_index_queue = sqs.Queue(self.tile_index_queue_url)
        self.assertEqual(
            expected_task_count,
            int(tile_index_queue.attributes['ApproximateNumberOfMessages']))
Example #4
0
    def test_run(self):
        """Consume a task from the upload queue and verify the tile reaches S3."""
        engine = Engine(self.config_file, self.api_token, 23)
        engine.msg_wait_iterations = 2

        # Seed the task queue so the engine has work to consume.
        self.setup_helper.add_tasks(self.aws_creds["access_key"], self.aws_creds['secret_key'], self.queue_url, engine.backend)

        engine.join()
        engine.run()

        # After the run, the tile object should exist in the tile bucket.
        tile_bucket = boto3.resource('s3').Bucket(self.tile_bucket_name)

        with tempfile.NamedTemporaryFile() as scratch:
            with open(scratch.name, 'wb') as sink:
                tile_bucket.download_fileobj("03ca58a12ec662954ac12e06517d4269&1&2&3&0&5&6&1&0", sink)

                # A successful download means the key was valid and the object's
                # bytes were streamed into the file handle.
                assert sink.tell() == 182300