Example #1
    def data_analysis(self, workstation, job):
        """Data analysis process step. """

        # Job is a single input entity

        num_entities = 1

        # Get resources required (a tuple of lists of required alternative
        # resources)

        resources_required = \
            self._params.process_resources['data_analysis']['human_list']

        # Process time
        process_times = self._params.process_duration['data_analysis'][0]

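        # Total time = fixed time + time per entity + time per sample in batch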
        process_time = (process_times[0] + process_times[1] * num_entities +
                        process_times[2] * num_entities * job.batch_size)

        process_priority = self._params.process_priorities['data_analysis']

        # Generate new entity (one output entity per job)
        self._id_count += 1

        entity = Entity(_env=self._env,
                        _params=self._params,
                        batch_id=job.batch_id,
                        batch_size=self._params.basic_batch_size * 4,
                        entity_id=self._id_count,
                        entity_type='data analysis',
                        last_queue='q_completed',
                        last_queue_time_in=self._env.now,
                        parent_ids=[job.entity_id],
                        priority=job.priority,
                        time_in=job.time_in,
                        time_stamps=job.time_stamps.copy())

        self._env.process(
            self.occupy_resources_single_subprocess(
                workstation=workstation,
                resources_required=resources_required,
                process_time=process_time,
                priority=process_priority,
                entity_to_create=entity,
                queue_to_add_new_entity='q_completed',
                process_step='data_analysis'))

        self.record_queuing_time('q_data_analysis', job.last_queue_time_in,
                                 self._env.now)

        # This is the last stage - add to output log
        output_log = [
            job.batch_id, self._env.now, job.batch_size, job.time_in,
            self._env.now
        ]
        self._count_out.append(output_log)
Example #2
    def sample_prep_auto(self, workstation, job):
        """
        """
        # Job is a single input entity

        num_entities = 1

        # Get resources required (a tuple of list of required alternative
        # resources)
        human_resources = self._params.process_resources['sample_prep_auto'][
            'human_list']
        machine_resources = self._params.process_resources['sample_prep_auto'][
            'machine_list']

        # Process time
        process_times = self._params.process_duration['sample_prep_auto']

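        # Calculate a process time for each stage of the automated process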
        stage_process_times = []
        for stage in process_times:
            process_time = (stage[0] + stage[1] * num_entities +
                            stage[2] * num_entities * job.batch_size)
            stage_process_times.append(process_time)

        process_priority = self._params.process_priorities['sample_prep_auto']

        # Generate new entity (one output entity per job)
        self._id_count += 1

        # Define entity to create
        entity = Entity(_env=self._env,
                        _params=self._params,
                        batch_id=job.batch_id,
                        batch_size=self._params.basic_batch_size,
                        entity_id=self._id_count,
                        entity_type='samples in plate for pcr',
                        last_queue='q_heat_collation',
                        last_queue_time_in=self._env.now,
                        parent_ids=[job.entity_id],
                        priority=job.priority,
                        time_in=job.time_in,
                        time_stamps=job.time_stamps.copy())

        # Start the automated subprocess; new entity is routed to q_heat_collation
        self._env.process(
            self.occupy_resources_automated_subprocess(
                workstation=workstation,
                human_resources=human_resources,
                machine_resources=machine_resources,
                stage_process_times=stage_process_times,
                priority=process_priority,
                entity_to_create=entity,
                queue_to_add_new_entity='q_heat_collation',
                process_step='sample_prep_auto'))

        self.record_queuing_time('q_sample_prep', job.last_queue_time_in,
                                 self._env.now)
Example #3
    def sample_receipt(self, workstation, job):
        """
        Process as described:
            Takes batches of 250 samples. Log and rack into racks of samples.
            Time taken = 133 min         

        """

        # Job is a single input entity

        num_entities = 1

        # Get resources required (a tuple of lists of required alternative
        # resources)
        resources_required = self._params.process_resources['sample_receipt'][
            'human_list']

        # Process time
        process_times = self._params.process_duration['sample_receipt'][0]

        process_time = (process_times[0] + process_times[1] * num_entities +
                        process_times[2] * num_entities * job.batch_size)

        process_priority = self._params.process_priorities['sample_receipt']

        # Generate new entity (one output entity per job)
        self._id_count += 1

        entity = Entity(_env=self._env,
                        _params=self._params,
                        batch_id=job.batch_id,
                        batch_size=self._params.basic_batch_size,
                        entity_id=self._id_count,
                        entity_type='rack of tubes for sample prep',
                        last_queue='q_sample_prep',
                        last_queue_time_in=self._env.now,
                        parent_ids=[job.entity_id],
                        priority=job.priority,
                        time_in=job.time_in,
                        time_stamps=job.time_stamps.copy())

        self._env.process(
            self.occupy_resources_single_subprocess(
                workstation=workstation,
                resources_required=resources_required,
                process_time=process_time,
                priority=process_priority,
                entity_to_create=entity,
                queue_to_add_new_entity='q_sample_prep',
                process_step='sample_receipt'))

        self.record_queuing_time('q_sample_receipt', job.last_queue_time_in,
                                 self._env.now)
Example #4
    def transfer_1(self, workstation, job):
        """
        Process as described:
            Transfer

        """

        # Job is a single input entity

        num_entities = 1

        # Get resources required (a tuple of lists of required alternative
        # resources)
        resources_required = self._params.process_resources['transfer_1'][
            'human_list']

        # Process time
        process_times = self._params.process_duration['transfer_1'][0]

        process_time = (process_times[0] + process_times[1] * num_entities +
                        process_times[2] * num_entities * job.batch_size)

        process_priority = self._params.process_priorities['transfer_1']

        # Generate new entity (one output entity per job)
        self._id_count += 1

        entity = Entity(_env=self._env,
                        _params=self._params,
                        batch_id=job.batch_id,
                        batch_size=self._params.basic_batch_size,
                        entity_id=self._id_count,
                        entity_type='plates in transfer',
                        last_queue='q_transfer_1_split',
                        last_queue_time_in=self._env.now,
                        parent_ids=[job.entity_id],
                        priority=job.priority,
                        time_in=job.time_in,
                        time_stamps=job.time_stamps.copy())

        self._env.process(
            self.occupy_resources_single_subprocess(
                workstation=workstation,
                resources_required=resources_required,
                process_time=process_time,
                priority=process_priority,
                entity_to_create=entity,
                queue_to_add_new_entity='q_transfer_1_split',
                process_step='transfer_1'))

        self.record_queuing_time('q_transfer_1', job.last_queue_time_in,
                                 self._env.now)
Example #5
    def pcr(self, workstation, job):
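        """PCR process step. Staged automated process using human and
        machine resources; output is queued for data analysis
        (q_data_analysis)."""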

        num_entities = 1

        # Get resources required (a tuple of lists of required alternative
        # resources)
        human_resources = self._params.process_resources['pcr']['human_list']
        machine_resources = self._params.process_resources['pcr'][
            'machine_list']

        # Process time
        process_times = self._params.process_duration['pcr']

        stage_process_times = []
        for stage in process_times:
            process_time = (stage[0] + stage[1] * num_entities +
                            stage[2] * num_entities * job.batch_size)
            stage_process_times.append(process_time)

        process_priority = self._params.process_priorities['pcr']

        # Generate new entity (one output entity per job)
        self._id_count += 1

        entity = Entity(_env=self._env,
                        _params=self._params,
                        batch_id=job.batch_id,
                        batch_size=self._params.basic_batch_size * 4,
                        entity_id=self._id_count,
                        entity_type='pcr output',
                        last_queue='q_data_analysis',
                        last_queue_time_in=self._env.now,
                        parent_ids=[job.entity_id],
                        priority=job.priority,
                        time_in=job.time_in,
                        time_stamps=job.time_stamps.copy())

        self._env.process(
            self.occupy_resources_automated_subprocess(
                workstation=workstation,
                human_resources=human_resources,
                machine_resources=machine_resources,
                stage_process_times=stage_process_times,
                priority=process_priority,
                entity_to_create=entity,
                queue_to_add_new_entity='q_data_analysis',
                process_step='pcr'))

        self.record_queuing_time('q_pcr', job.last_queue_time_in,
                                 self._env.now)
Example #6
    def collate(self, batch_size, from_queue, to_queue):
        """ Admin step that requires no time or resources.
        Use the first entity form each batch for the batch id and time in."""
        while self._queues[from_queue].qsize() >= batch_size:
            parent_ids = []
            new_batch_size = 0
            # Priority will be set to highest priority in batch (lowest #)
            priority = 9999
            # Get entities to combine
            for i in range(batch_size):
                ent = self._queues[from_queue].get()[1]
                new_batch_size += ent.batch_size
                parent_ids.append(ent.entity_id)

                # Record queuing time
                self.record_queuing_time(ent.last_queue,
                                         ent.last_queue_time_in, self._env.now)

                # Use initial batch id and time in from first entity
                if i == 0:
                    batch_id = ent.batch_id
                    time_in = ent.time_in
                    time_stamps = ent.time_stamps

                # Adjust priority if new higher priority batch found
                if ent.priority < priority:
                    priority = ent.priority

            # Generate new entity
            self._id_count += 1
            new_ent = Entity(_env=self._env,
                             _params=self._params,
                             batch_id=batch_id,
                             batch_size=new_batch_size,
                             entity_id=self._id_count,
                             entity_type='collated',
                             last_queue=to_queue,
                             last_queue_time_in=self._env.now,
                             parent_ids=parent_ids,
                             priority=priority,
                             time_in=time_in,
                             time_stamps=time_stamps)
            # Add to queue
            self._queues[to_queue].put((priority, new_ent))
Example #7
    def batch_input(self, workstation, job):
        """Create job batches of samples for model (not a physical step;
        does not need resources)."""

        original_batch_size = job.batch_size
        # Round up to a whole number of basic batches
        new_batches = int(
            np.ceil(original_batch_size / self._params.basic_batch_size))

        job.time_stamps['time_in_batched'] = self._env.now

        if new_batches > 1:
            for _batch in range(new_batches):
                self._id_count += 1
                # Set priority
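                # Lower numbers are served first (0 = high, 100 = routine)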
                if random.random() < self._params.high_priority:
                    priority = 0
                else:
                    priority = 100

                entity = Entity(_env=self._env,
                                _params=self._params,
                                batch_id=job.batch_id,
                                batch_size=self._params.basic_batch_size,
                                entity_id=self._id_count,
                                entity_type='sample tubes',
                                last_queue='q_sample_preprocess',
                                last_queue_time_in=self._env.now,
                                parent_ids=[job.entity_id],
                                priority=priority + self._id_count / 1e4,
                                time_in=job.time_in,
                                time_stamps=job.time_stamps)

                # Add to the sample preprocess queue
                # (priorities kept distinct by the id offset added above)
                item = (entity.priority, entity)
                self._queues['q_sample_preprocess'].put(item)

        self._workstation_assigned_jobs[workstation] -= 1
Example #8
    def generate_input(self, arrival_time):
        """Continuous loop of work arrival. Adds new work to batch input."""
        # First delivery
        yield self._env.timeout(arrival_time)
        # Loop continues generating new deliveries throughout the model run
        while True:
            # Get delivery batch size based on hour
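            # Convert sim time (assumed minutes) to hour of day (0-23)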
            hours = int(self._env.now / 60)
            day = int(hours / 24)
            hour = hours - (day * 24)
            delivery_size = self._params.delivery_batch_sizes[hour]
            # Generate new arrival batch entity
            self._id_count += 1
            self._batch_id_count += 1
            time_stamps = dict()
            time_stamps['time_in'] = self._env.now
            arrival_ent = Entity(_env=self._env,
                                 _params=self._params,
                                 batch_id=self._batch_id_count,
                                 batch_size=delivery_size,
                                 entity_id=self._id_count,
                                 entity_type='arrival batch',
                                 parent_ids=[],
                                 priority=99999,
                                 last_queue='q_batch_input',
                                 last_queue_time_in=self._env.now,
                                 time_in=self._env.now,
                                 time_stamps=time_stamps)

            # Add to queue for batching input
            self._queues['q_batch_input'].put((1, arrival_ent))

            # Log input
            input_log = [self._batch_id_count, self._env.now, delivery_size]
            self._count_in.append(input_log)

            # Schedule next delivery
            yield self._env.timeout(self._params.day_duration)
Example #9
    def split(self, batch_size, from_queue, to_queue):
        """Admin step that requires no time or resources.
        Splits each entity in from_queue into batch_size new entities."""
        while not self._queues[from_queue].empty():
            ent = self._queues[from_queue].get()[1]
            # Generate new entities; each receives an equal share of the
            # parent entity's batch
            new_batch_size = int(ent.batch_size / batch_size)
            for i in range(batch_size):
                self._id_count += 1
                new_ent = Entity(
                    _env=self._env,
                    _params=self._params,
                    batch_id=ent.batch_id,
                    batch_size=new_batch_size,
                    entity_id=self._id_count,
                    entity_type='split',
                    last_queue=to_queue,
                    last_queue_time_in=self._env.now,
                    parent_ids=[ent.entity_id],
                    # Tweak priority to avoid clash of priorities
                    priority=ent.priority + i / 1e6,
                    time_in=ent.time_in,
                    time_stamps=ent.time_stamps.copy())
                # Add to queue
                self._queues[to_queue].put((new_ent.priority, new_ent))