Exemple #1
0
def main(args):
	"""Start a vehicle, install shutdown signal handlers, and wait.

	:param args: Command-line arguments (unused here).
	:return: 0 on normal completion.
	"""
	car = Vehicle("Opel", "Corsa")
	car.engine_on()
	# Install the same handler for both interactive and service shutdown.
	for signum in (signal.SIGINT, signal.SIGTERM):
		signal.signal(signum, signal_handler)
	print('Press Ctrl+C to exit')
	ThreadManager.waitForFinish()
	signal.pause()
	return 0
    def setup_consumer_threads(self):
        """ Create ThreadManager object with base broker url and kwargs to setup consumers.

            :params: None.

            :return: None.
        """
        base_broker_url = "amqp://%s:%s@%s" % (self._msg_name,
                                               self._msg_passwd,
                                               str(self._base_broker_addr))
        LOGGER.info('Building _base_broker_url. Result is %s', base_broker_url)

        self.shutdown_event = threading.Event()
        self.shutdown_event.clear()

        # The Archive Device needs three message consumers; describe each one
        # as (thread name, queue, callback, test_val) and expand below.
        consumer_specs = [
            ('Thread-aux_foreman_consume', 'at_foreman_consume',
             self.on_aux_foreman_message, None),
            ('Thread-at_foreman_ack_publish', 'at_foreman_ack_publish',
             self.on_ack_message, 'test_it'),
            ('Thread-archive_ctrl_publish', 'archive_ctrl_publish',
             self.on_archive_message, 'test_it'),
        ]
        kws = {}
        for name, queue, callback, test_val in consumer_specs:
            kws[name] = {
                'amqp_url': base_broker_url,
                'name': name,
                'queue': queue,
                'callback': callback,
                'format': "YAML",
                'test_val': test_val,
            }

        self.thread_manager = ThreadManager('thread-manager', kws,
                                            self.shutdown_event)
        self.thread_manager.start()
Exemple #3
0
class Main(object):
    """Application entry object: owns the module and thread managers."""

    def __init__(self):
        self.module_manager = ModuleManager(self)
        self.thread_manager = ThreadManager()
        self.active = True

    def start(self):
        """Start all modules, then poll scheduled threads until deactivated."""
        self.module_manager.start_modules()
        while True:
            if not self.active:
                break
            self.thread_manager.check_scheduled()
            # A module may clear self.active during check_scheduled();
            # shut down immediately rather than waiting for the next pass.
            if not self.active:
                self.__exit()

    def __exit(self):
        """Stop every managed thread and hard-exit the process."""
        self.thread_manager.stop_all()
        # Hacky way to get a clean exit when exiting through the display exit button:
        os._exit(0)
    def setup_consumer(self):
        """ Start the single archive-control consumer thread under a ThreadManager.

            :params: None.

            :return: None.
        """
        LOGGER.info('Setting up archive consumers on %s',
                    self._base_broker_url)
        LOGGER.info('Running start_new_thread for archive consumer')

        self.shutdown_event = threading.Event()
        self.shutdown_event.clear()

        thread_name = 'Thread-ar_ctrl_consume'
        kws = {
            thread_name: {
                'amqp_url': self._base_broker_url,
                'name': thread_name,
                'queue': self.ARCHIVE_CTRL_CONSUME,
                'callback': self.on_archive_message,
                'format': "YAML",
                'test_val': None,
            }
        }

        self.thread_manager = ThreadManager('thread-manager', kws,
                                            self.shutdown_event)
        self.thread_manager.start()
    def setup_consumer_threads(self):
        """ Build base and NCSA broker URLs and start the two consumer threads.

            :params: None.

            :return: None.
        """
        LOGGER.info('Building _base_broker_url')
        base_broker_url = "amqp://%s:%s@%s" % (self._sub_base_name,
                                               self._sub_base_passwd,
                                               str(self._base_broker_addr))

        ncsa_broker_url = "amqp://%s:%s@%s" % (self._sub_ncsa_name,
                                               self._sub_ncsa_passwd,
                                               str(self._ncsa_broker_addr))

        self.shutdown_event = threading.Event()

        # One kwargs dict per consumer thread, keyed by thread name.
        specs = [
            (ncsa_broker_url, 'Thread-ncsa_foreman_ack_publish',
             'ncsa_foreman_ack_publish', self.on_ack_message),
            (base_broker_url, 'Thread-ncsa_consume',
             'ncsa_consume', self.on_pp_message),
        ]
        kws = {}
        for url, name, queue, callback in specs:
            kws[name] = {
                'amqp_url': url,
                'name': name,
                'queue': queue,
                'callback': callback,
                'format': "YAML",
                'test_val': 'test_it',
            }

        self.thread_manager = ThreadManager('thread-manager', kws,
                                            self.shutdown_event)
        self.thread_manager.start()
Exemple #6
0
def signal_handler(sig, frame):
	"""Stop all managed threads and exit with status 42.

	:param sig: Signal number delivered by the OS.
	:param frame: Current stack frame at delivery time (unused).
	"""
	# BUG FIX: this used the Python 2 print statement, a SyntaxError on
	# Python 3 — every other fragment in this file uses print() calls.
	print("Signal received, stopping")
	ThreadManager.terminate()
	sys.exit(42)
class NcsaForeman:
    """ NCSA-side foreman: pairs forwarders with distributors, relays
        readout commands, and acks results back to the Base site.
    """
    NCSA_CONSUME = "ncsa_consume"
    NCSA_PUBLISH = "ncsa_publish"
    COMPONENT_NAME = 'NCSA_FOREMAN'
    DISTRIBUTOR_PUBLISH = "distributor_publish"
    ACK_PUBLISH = "ack_publish"
    CFG_FILE = 'L1SystemCfg.yaml'
    prp = toolsmod.prp

    def __init__(self, filename=None):
        """ Create the NCSA Foreman singleton.

            Reads config values, registers message handlers, and brings up
            publishers, scoreboards, and consumer threads.

            :params filename: Optional config file path; defaults to CFG_FILE.

            :return: None.
        """
        toolsmod.singleton(self)

        self._config_file = self.CFG_FILE
        # PEP 8: compare to None with 'is not', not '!='.
        if filename is not None:
            self._config_file = filename

        #self._pairing_dict = {}

        LOGGER.info('Extracting values from Config dictionary')
        self.extract_config_values()

        # Dispatch table mapping incoming message type to its handler.
        self._msg_actions = {
            'NCSA_NEXT_VISIT': self.set_visit,
            'NCSA_NEW_SESSION': self.set_session,
            'NCSA_START_INTEGRATION': self.process_start_integration,
            'NCSA_READOUT': self.process_readout,
            'DISTRIBUTOR_HEALTH_CHECK_ACK': self.process_ack,
            'DISTRIBUTOR_XFER_PARAMS_ACK': self.process_ack,
            'DISTRIBUTOR_READOUT_ACK': self.process_ack
        }

        self._next_timed_ack_id = 10000

        # BUG FIX: setup_publishers() and setup_consumer_threads() were each
        # invoked twice, creating duplicate publisher connections and consumer
        # threads (the first ThreadManager was then discarded by re-assigning
        # self.thread_manager = None). Call each exactly once.
        self.setup_publishers()
        self.setup_scoreboards()

        LOGGER.info('Ncsa foreman consumer setup')
        self.thread_manager = None
        self.setup_consumer_threads()

        LOGGER.info('Ncsa Foreman Init complete')

    def setup_publishers(self):
        """ Create the Base-site and NCSA-site SimplePublisher instances.

            :params: None.

            :return: None.
        """
        self._pub_base_broker_url = "amqp://%s:%s@%s" % (
            self._pub_base_name, self._pub_base_passwd,
            str(self._base_broker_addr))

        self._pub_ncsa_broker_url = "amqp://%s:%s@%s" % (
            self._pub_ncsa_name, self._pub_ncsa_passwd,
            str(self._ncsa_broker_addr))

        LOGGER.info('Setting up Base publisher on %s using %s',
                    self._pub_base_broker_url, self._base_msg_format)
        self._base_publisher = SimplePublisher(self._pub_base_broker_url,
                                               self._base_msg_format)

        LOGGER.info('Setting up NCSA publisher on %s using %s',
                    self._pub_ncsa_broker_url, self._ncsa_msg_format)
        self._ncsa_publisher = SimplePublisher(self._pub_ncsa_broker_url,
                                               self._ncsa_msg_format)

    def on_pp_message(self, ch, method, properties, body):
        """ Ack the delivery and dispatch a PP message to its handler.

            :params ch: Channel to the message broker.
            :params method: Delivery method from Pika.
            :params properties: Message properties (unused).
            :params body: Dict holding the message body.

            :return: None.
        """
        ch.basic_ack(method.delivery_tag)
        LOGGER.debug('Message from PP callback message body is: %s',
                     self.prp.pformat(body))
        # Route to the handler registered for this message type.
        self._msg_actions.get(body[MSG_TYPE])(body)

    def on_ack_message(self, ch, method, properties, body):
        """ Ack the delivery and dispatch an ACK message to its handler.

            :params ch: Channel to the message broker.
            :params method: Delivery method from Pika.
            :params properties: Message properties (unused).
            :params body: Dict holding the message body.

            :return: None.
        """
        ch.basic_ack(method.delivery_tag)
        LOGGER.info('In ACK message callback')
        LOGGER.debug('Message from ACK callback message body is: %s',
                     self.prp.pformat(body))
        # Route to the handler registered for this message type.
        self._msg_actions.get(body[MSG_TYPE])(body)

    def set_visit(self, params):
        """ Record the visit id and boresight, then ack NCSA_NEXT_VISIT.

            :params params: NCSA_NEXT_VISIT message dict from the base.

            :return: None.
        """
        bore_sight = params['BORE_SIGHT']
        self.JOB_SCBD.set_visit_id(params['VISIT_ID'], bore_sight)
        ###
        ### Send Boresight to Someone here...
        ###
        ack_msg = {
            'MSG_TYPE': 'NCSA_NEXT_VISIT_ACK',
            'COMPONENT': self.COMPONENT_NAME,
            'ACK_ID': params['ACK_ID'],
            'ACK_BOOL': True,
        }
        self._base_publisher.publish_message(params['REPLY_QUEUE'], ack_msg)

    def set_session(self, params):
        """ Record the session id, then ack NCSA_NEW_SESSION to the base.

            :params params: NCSA_NEW_SESSION message dict from the base.

            :return: None.
        """
        self.JOB_SCBD.set_session(params['SESSION_ID'])
        ack_msg = {
            'MSG_TYPE': 'NCSA_NEW_SESSION_ACK',
            'COMPONENT': self.COMPONENT_NAME,
            'ACK_ID': params['ACK_ID'],
            'ACK_BOOL': True,
        }
        self._base_publisher.publish_message(params['REPLY_QUEUE'], ack_msg)

    def process_start_integration(self, params):
        """ Handle NCSA_START_INTEGRATION from the base.

            Health-checks all distributors, pairs each healthy distributor
            with a forwarder from the message, pushes transfer params to each
            distributor, and acks the base with the pairings — or refuses the
            job when fewer healthy distributors than forwarders are available.

            :params params: NCSA_START_INTEGRATION message dict from the base.

            :return: None.

            NOTE(review): bare names MSG_TYPE, JOB_NUM, ACK_ID, ACK_BOOL,
            STATE and IN_READY_STATE used below are presumably module-level
            constants imported elsewhere in this file — confirm.
        """
        job_num = str(params[JOB_NUM])
        image_id = params['IMAGE_ID']
        visit_id = params['VISIT_ID']
        response_timed_ack_id = params["ACK_ID"]
        LOGGER.info('NCSA received Start Integration message from Base')
        LOGGER.debug('NCSA Start Integration incoming message: %s' % params)

        forwarders_list = params['FORWARDERS']['FORWARDER_LIST']
        ccd_list = params['FORWARDERS']['CCD_LIST']  # A list of lists...
        len_forwarders_list = len(forwarders_list)
        self.JOB_SCBD.add_job(job_num, image_id, visit_id, ccd_list)
        LOGGER.info('Received new job %s. Needed workers is %s', job_num,
                    str(len_forwarders_list))

        # run distributor health check
        # get timed_ack_id
        timed_ack = self.get_next_timed_ack_id("DISTRIBUTOR_HEALTH_CHECK_ACK")

        distributors = self.DIST_SCBD.return_distributors_list()
        # Mark all healthy distributors Unknown
        state_unknown = {"STATE": "HEALTH_CHECK", "STATUS": "UNKNOWN"}
        self.DIST_SCBD.set_distributor_params(distributors, state_unknown)

        # send health check messages
        ack_params = {}
        ack_params[MSG_TYPE] = "DISTRIBUTOR_HEALTH_CHECK"
        ack_params['REPLY_QUEUE'] = 'ncsa_foreman_ack_publish'
        ack_params["ACK_ID"] = timed_ack
        ack_params[JOB_NUM] = job_num
        for distributor in distributors:
            self._ncsa_publisher.publish_message(
                self.DIST_SCBD.get_value_for_distributor(
                    distributor, "CONSUME_QUEUE"), ack_params)

        # start timers
        # Fixed wait: give distributors 2 seconds to answer the health check.
        self.ack_timer(2)

        # at end of timer, get list of distributors
        dicts_of_distributors = self.ACK_SCBD.get_components_for_timed_ack(
            timed_ack)
        healthy_distributors = list(dicts_of_distributors.keys())

        # update distributor scoreboard with healthy distributors
        healthy_status = {"STATUS": "HEALTHY"}
        self.DIST_SCBD.set_distributor_params(healthy_distributors,
                                              healthy_status)

        num_healthy_distributors = len(healthy_distributors)
        if len_forwarders_list > num_healthy_distributors:
            print("Cannot Do Job - more fwdrs than dists")
            # send response msg to base refusing job
            LOGGER.info(
                'Reporting to base insufficient healthy distributors for job #%s',
                job_num)
            ncsa_params = {}
            ncsa_params[MSG_TYPE] = "NCSA_RESOURCES_QUERY_ACK"
            ncsa_params[JOB_NUM] = job_num
            ncsa_params["ACK_BOOL"] = False
            ncsa_params["ACK_ID"] = response_timed_ack_id
            # NOTE(review): NCSA_PUBLISH here is a bare name, not
            # self.NCSA_PUBLISH — this raises NameError unless a module-level
            # constant of that name exists. Also, unlike every other publish
            # in this class, the payload is yaml.dump()'d. Confirm both.
            self._base_publisher.publish_message(NCSA_PUBLISH,
                                                 yaml.dump(ncsa_params))
            # delete job and leave distributors in Idle state
            self.JOB_SCBD.delete_job(job_num)
            idle_state = {"STATE": "IDLE"}
            self.DIST_SCBD.set_distributor_params(healthy_distributors,
                                                  idle_state)

        else:
            Pairs = self.assemble_pairs(forwarders_list, ccd_list,
                                        healthy_distributors)
            self.JOB_SCBD.set_pairs_for_job(job_num, Pairs)

            # send pair info to each distributor
            job_params_ack = self.get_next_timed_ack_id(
                'DISTRIBUTOR_XFER_PARAMS_ACK')
            for j in range(0, len(Pairs)):
                tmp_msg = {}
                tmp_msg[MSG_TYPE] = 'DISTRIBUTOR_XFER_PARAMS'
                tmp_msg['XFER_PARAMS'] = Pairs[j]
                tmp_msg[JOB_NUM] = job_num
                tmp_msg[ACK_ID] = job_params_ack
                tmp_msg['REPLY_QUEUE'] = 'ncsa_foreman_ack_publish'
                tmp_msg['VISIT_ID'] = visit_id
                tmp_msg['IMAGE_ID'] = image_id
                fqn = Pairs[j]['DISTRIBUTOR']['FQN']
                route_key = self.DIST_SCBD.get_value_for_distributor(
                    fqn, 'CONSUME_QUEUE')
                self._ncsa_publisher.publish_message(route_key, tmp_msg)

            self.DIST_SCBD.set_distributor_params(healthy_distributors,
                                                  {STATE: IN_READY_STATE})
            # Wait up to 2 seconds (polling) for every distributor to ack.
            dist_params_response = self.progressive_ack_timer(
                job_params_ack, num_healthy_distributors, 2.0)

            if dist_params_response == None:
                print(
                    "RECEIVED NO ACK RESPONSES FROM DISTRIBUTORS AFTER SENDING XFER PARAMS"
                )
                pass  #Do something such as raise a system wide exception

            # Now inform PP Foreman that all is in ready state
            ncsa_params = {}
            ncsa_params[MSG_TYPE] = "NCSA_START_INTEGRATION_ACK"
            ncsa_params[JOB_NUM] = job_num
            ncsa_params['IMAGE_ID'] = image_id
            ncsa_params['VISIT_ID'] = visit_id
            ncsa_params['SESSION_ID'] = params['SESSION_ID']
            ncsa_params['COMPONENT'] = 'NCSA_FOREMAN'
            ncsa_params[ACK_BOOL] = True
            ncsa_params["ACK_ID"] = response_timed_ack_id
            ncsa_params["PAIRS"] = Pairs
            self._base_publisher.publish_message(params['REPLY_QUEUE'],
                                                 ncsa_params)
            LOGGER.info(
                'Sufficient distributors and workers are available. Informing Base'
            )
            LOGGER.debug('NCSA Start Integration incoming message: %s' %
                         ncsa_params)

            LOGGER.info(
                'The following pairings have been sent to the Base for job %s:'
                % job_num)
            LOGGER.info(Pairs)

    def assemble_pairs(self, forwarders_list, ccd_list, healthy_distributors):
        """ Pair each forwarder (with its CCD list) to a healthy distributor.

            :params forwarders_list: Forwarder FQNs, one per unit of work.
            :params ccd_list: Per-forwarder CCD lists, parallel to forwarders_list.
            :params healthy_distributors: Distributor FQNs, consumed in order.

            :return: List of dicts with FORWARDER, CCD_LIST and DISTRIBUTOR keys.
        """
        pairs = []
        get_val = self.DIST_SCBD.get_value_for_distributor

        for idx, forwarder in enumerate(forwarders_list):
            distributor = healthy_distributors[idx]
            dist_info = {
                'FQN': distributor,
                'HOSTNAME': get_val(distributor, HOSTNAME),
                'NAME': get_val(distributor, NAME),
                'IP_ADDR': get_val(distributor, IP_ADDR),
                'TARGET_DIR': get_val(distributor, TARGET_DIR),
            }
            pairs.append({
                'FORWARDER': forwarder,
                'CCD_LIST': ccd_list[idx],
                'DISTRIBUTOR': dist_info,
            })

        return pairs

    def process_readout(self, params):
        """ Command each distributor paired to this job to start readout,
            collect their acks, and forward the aggregated CCD/receipt result
            (or a negative ack on timeout) back to the base.

            :params params: NCSA_READOUT message dict from the base.

            :return: None.
        """
        job_number = params[JOB_NUM]
        response_ack_id = params[ACK_ID]
        pairs = self.JOB_SCBD.get_pairs_for_job(job_number)
        sleep(3)
        len_pairs = len(pairs)
        ack_id = self.get_next_timed_ack_id(DISTRIBUTOR_READOUT_ACK)
        for pair in pairs:  # Pairs is a list of dictionaries
            distributor = pair['DISTRIBUTOR']['FQN']
            msg_params = {}
            msg_params[MSG_TYPE] = DISTRIBUTOR_READOUT
            msg_params[JOB_NUM] = job_number
            msg_params['REPLY_QUEUE'] = 'ncsa_foreman_ack_publish'
            msg_params[ACK_ID] = ack_id
            routing_key = self.DIST_SCBD.get_routing_key(distributor)
            self.DIST_SCBD.set_distributor_state(distributor, 'START_READOUT')
            self._ncsa_publisher.publish_message(routing_key, msg_params)

        distributor_responses = self.progressive_ack_timer(
            ack_id, len_pairs, 24)

        if distributor_responses is not None:
            CCD_LIST = []
            RECEIPT_LIST = []
            ncsa_params = {}
            ncsa_params[MSG_TYPE] = NCSA_READOUT_ACK
            ncsa_params[JOB_NUM] = job_number
            ncsa_params['IMAGE_ID'] = params['IMAGE_ID']
            ncsa_params['VISIT_ID'] = params['VISIT_ID']
            ncsa_params['SESSION_ID'] = params['SESSION_ID']
            ncsa_params['COMPONENT'] = 'NCSA_FOREMAN'
            ncsa_params[ACK_ID] = response_ack_id
            ncsa_params[ACK_BOOL] = True
            # Flatten every distributor's CCD and receipt lists into one
            # combined result for the base.
            for dist in distributor_responses:
                result = distributor_responses[dist]['RESULT_LIST']
                CCD_LIST.extend(result['CCD_LIST'])
                RECEIPT_LIST.extend(result['RECEIPT_LIST'])
            ncsa_params['RESULT_LIST'] = {'CCD_LIST': CCD_LIST,
                                          'RECEIPT_LIST': RECEIPT_LIST}
            # BUG FIX: this branch previously published msg_params (the last
            # per-distributor readout message) instead of ncsa_params.
            self._base_publisher.publish_message(params['REPLY_QUEUE'],
                                                 ncsa_params)

        else:
            ncsa_params = {}
            ncsa_params[MSG_TYPE] = NCSA_READOUT_ACK
            ncsa_params[JOB_NUM] = job_number
            # BUG FIX: value was the undefined bare name NCSA_FOREMAN.
            # NOTE(review): the success branch uses key 'COMPONENT'; this
            # branch historically used 'COMPONENT_NAME' — preserved here.
            ncsa_params['COMPONENT_NAME'] = self.COMPONENT_NAME
            ncsa_params['IMAGE_ID'] = params['IMAGE_ID']
            ncsa_params['VISIT_ID'] = params['VISIT_ID']
            ncsa_params['SESSION_ID'] = params['SESSION_ID']
            ncsa_params[ACK_ID] = response_ack_id
            # BUG FIX: 'FALSE' was an undefined name; a negative ack must
            # carry the boolean False.
            ncsa_params[ACK_BOOL] = False
            ncsa_params['RESULT_LIST'] = {'CCD_LIST': None,
                                          'RECEIPT_LIST': None}
            # BUG FIX: previously published msg_params (undefined when pairs
            # was empty, and wrong content otherwise) instead of ncsa_params.
            self._base_publisher.publish_message(params['REPLY_QUEUE'],
                                                 ncsa_params)

    def process_ack(self, params):
        """ Store a timed ack on the ACK scoreboard for later collection
            by the progressive/fixed ack timers.

            :params params: Ack message dict (must carry its timed ack id).

            :return: None.
        """
        self.ACK_SCBD.add_timed_ack(params)

    def get_next_timed_ack_id(self, ack_type):
        """ Return the next unique timed ack id for the given ack type.

            :params ack_type: Ack type prefix, e.g. 'DISTRIBUTOR_READOUT_ACK'.

            :return: String of the form '<ack_type>_NNNNNN' (zero-padded counter).
        """
        self._next_timed_ack_id += 1
        return "%s_%s" % (ack_type, str(self._next_timed_ack_id).zfill(6))

    def ack_timer(self, seconds):
        """ Block for a fixed interval while acks accumulate on the scoreboard.

            :params seconds: How long to sleep.

            :return: Always True.
        """
        sleep(seconds)
        return True

    def progressive_ack_timer(self, ack_id, expected_replies, seconds):
        """ Poll the ACK scoreboard every half second, up to `seconds` total,
            until all expected replies have arrived.

            :params ack_id: Timed ack id to collect responses for.
            :params expected_replies: Number of component replies required.
            :params seconds: Maximum time to keep polling.

            :return: Dict of responses keyed by component when complete,
                     else None.
        """
        elapsed = 0.0
        while elapsed < seconds:
            elapsed += 0.5
            sleep(0.5)
            replies = self.ACK_SCBD.get_components_for_timed_ack(ack_id)
            if replies is not None and len(replies) == expected_replies:
                return replies

        ## Try one final time
        replies = self.ACK_SCBD.get_components_for_timed_ack(ack_id)
        if replies is not None and len(replies) == expected_replies:
            return replies
        return None

    def extract_config_values(self):
        """ Load the YAML config file and cache broker addresses/credentials.

            Exits the process with status 101 when the config file is missing
            and 99 when a required key is absent.

            :params: None.

            :return: None.

            NOTE(review): ROOT, BASE_BROKER_ADDR, NCSA_BROKER_ADDR and the
            *_MSG_FORMAT names are presumably module-level constants imported
            elsewhere in this file — confirm.
        """
        try:
            cdm = toolsmod.intake_yaml_file(self._config_file)
        except IOError as e:
            LOGGER.critical("Unable to find CFG Yaml file %s\n" %
                            self._config_file)
            sys.exit(101)

        try:
            self._base_broker_addr = cdm[ROOT][BASE_BROKER_ADDR]
            self._ncsa_broker_addr = cdm[ROOT][NCSA_BROKER_ADDR]

            self._sub_base_name = cdm[ROOT][
                'NFM_BASE_BROKER_NAME']  # Message broker user & passwd
            self._sub_base_passwd = cdm[ROOT]['NFM_BASE_BROKER_PASSWD']
            self._sub_ncsa_name = cdm[ROOT][
                'NFM_NCSA_BROKER_NAME']  # Message broker user & passwd
            self._sub_ncsa_passwd = cdm[ROOT]['NFM_NCSA_BROKER_PASSWD']

            self._pub_base_name = cdm[ROOT]['BASE_BROKER_PUB_NAME']
            self._pub_base_passwd = cdm[ROOT]['BASE_BROKER_PUB_PASSWD']
            self._pub_ncsa_name = cdm[ROOT]['NCSA_BROKER_PUB_NAME']
            self._pub_ncsa_passwd = cdm[ROOT]['NCSA_BROKER_PUB_PASSWD']

            self._scbd_dict = cdm[ROOT]['SCOREBOARDS']
            self.distributor_dict = cdm[ROOT][XFER_COMPONENTS][DISTRIBUTORS]
        except KeyError as e:
            LOGGER.critical("CDM Dictionary error - missing Key")
            LOGGER.critical("Offending Key is %s", str(e))
            LOGGER.critical("Bailing Out...")
            sys.exit(99)

        # Message formats default to YAML unless overridden by the config.
        self._base_msg_format = 'YAML'
        self._ncsa_msg_format = 'YAML'

        # NOTE(review): membership is tested with the string literal
        # 'BASE_MSG_FORMAT' but the value is read via the constant
        # BASE_MSG_FORMAT — assumed to hold the same key; confirm.
        if 'BASE_MSG_FORMAT' in cdm[ROOT]:
            self._base_msg_format = cdm[ROOT][BASE_MSG_FORMAT]

        if 'NCSA_MSG_FORMAT' in cdm[ROOT]:
            self._ncsa_msg_format = cdm[ROOT][NCSA_MSG_FORMAT]

    def setup_consumer_threads(self):
        """ Build base and NCSA broker URLs and start the two consumer threads.

            :params: None.

            :return: None.
        """
        LOGGER.info('Building _base_broker_url')
        base_broker_url = "amqp://%s:%s@%s" % (self._sub_base_name,
                                               self._sub_base_passwd,
                                               str(self._base_broker_addr))

        ncsa_broker_url = "amqp://%s:%s@%s" % (self._sub_ncsa_name,
                                               self._sub_ncsa_passwd,
                                               str(self._ncsa_broker_addr))

        self.shutdown_event = threading.Event()

        # One kwargs dict per consumer thread, keyed by thread name.
        specs = [
            (ncsa_broker_url, 'Thread-ncsa_foreman_ack_publish',
             'ncsa_foreman_ack_publish', self.on_ack_message),
            (base_broker_url, 'Thread-ncsa_consume',
             'ncsa_consume', self.on_pp_message),
        ]
        kws = {}
        for url, name, queue, callback in specs:
            kws[name] = {
                'amqp_url': url,
                'name': name,
                'queue': queue,
                'callback': callback,
                'format': "YAML",
                'test_val': 'test_it',
            }

        self.thread_manager = ThreadManager('thread-manager', kws,
                                            self.shutdown_event)
        self.thread_manager.start()

    def setup_scoreboards(self):
        """ Instantiate the distributor, job and ack Redis scoreboards.

            :params: None.

            :return: None.
        """
        # Create Redis Distributor table with Distributor info
        self.DIST_SCBD = DistributorScoreboard(
            'NCSA_DIST_SCBD',
            self._scbd_dict['NCSA_DIST_SCBD'],
            self.distributor_dict)
        self.JOB_SCBD = JobScoreboard(
            'NCSA_JOB_SCBD', self._scbd_dict['NCSA_JOB_SCBD'])
        self.ACK_SCBD = AckScoreboard(
            'NCSA_ACK_SCBD', self._scbd_dict['NCSA_ACK_SCBD'])

    def shutdown(self):
        """ Signal all consumer threads to stop, then hard-exit the process.

            :params: None.

            :return: Never returns (os._exit).
        """
        LOGGER.debug("NCSA Foreman: Shutting down Consumer threads.")
        # Consumer threads watch this event and wind down when it is set.
        self.shutdown_event.set()
        LOGGER.debug("Thread Manager shutting down and app exiting...")
        print("\n")
        os._exit(0)
def defaultDispatcher_logic(operation):
	"""Handle a dispatcher command string.

	:param operation: The command to dispatch.
	:return: "ok" when the exit command was handled, "unknown" otherwise.
	"""
	if operation != "exit":
		return "unknown"
	ThreadManager.stopAllThreads()
	return "ok"
class AuxDevice:
    """ The Spec Device is a commandable device which coordinates the ingest of
        images from the telescope camera and then the transfer of those images to
        the base site archive storage.
        It receives jobs and divides and assigns the work to forwarders, records state and
        status change of forwarders, and sends messages accordingly.
    """
    COMPONENT_NAME = 'AUX_FOREMAN'
    AT_FOREMAN_CONSUME = "at_foreman_consume"
    ARCHIVE_CTRL_PUBLISH = "archive_ctrl_publish"
    ARCHIVE_CTRL_CONSUME = "archive_ctrl_consume"
    AT_FOREMAN_ACK_PUBLISH = "at_foreman_ack_publish"
    START_INTEGRATION_XFER_PARAMS = {}
    ACK_QUEUE = []
    CFG_FILE = 'L1SystemCfg.yaml'
    prp = toolsmod.prp
    DP = toolsmod.DP
    RAFT_LIST = []
    RAFT_CCD_LIST = ['00']

    def __init__(self, filename=None):
        """ Create a new instance of the Spectrograph Device class.
            Instantiate the instance, raise assertion error if already instantiated.
            Extract config values from yaml file.
            Store handler methods for each message type.
            Set up base broker url, publishers, and scoreboards. Consumer threads
            are started within a Thread Manager object so that they can be monitored
            for health and shutdown/joined cleanly when the app exits.

            :params filename: Default 'L1SystemCfg.yaml'. Can be assigned by user.

            :return: None.
        """
        toolsmod.singleton(self)

        self._config_file = self.CFG_FILE
        # PEP 8: compare to None with 'is not', not '!='.
        if filename is not None:
            self._config_file = filename

        LOGGER.info('Extracting values from Config dictionary')
        self.extract_config_values()

        #self.purge_broker(cdm['ROOT']['QUEUE_PURGES'])

        # Dispatch table mapping incoming message type to its handler.
        self._msg_actions = {
            'AT_START_INTEGRATION': self.process_at_start_integration,
            'AT_NEW_SESSION': self.set_session,
            #'AR_READOUT': self.process_dmcs_readout,
            'AUX_FWDR_HEALTH_CHECK_ACK': self.process_ack,
            'AUX_FWDR_XFER_PARAMS_ACK': self.process_ack,
            'AR_FWDR_READOUT_ACK': self.process_ack,
            'AR_ITEMS_XFERD_ACK': self.process_ack,
            'AT_HEADER_READY': self.process_header_ready_event,
            'NEW_ARCHIVE_ITEM_ACK': self.process_ack,
            #'AUX_TAKE_IMAGES': self.take_images,
            'AT_END_READOUT': self.process_at_end_readout
        }

        self._next_timed_ack_id = 0

        self.setup_publishers()

        LOGGER.info('ar foreman consumer setup')
        self.thread_manager = None
        self.setup_consumer_threads()

        LOGGER.info('Archive Foreman Init complete')

    def setup_publishers(self):
        """ Set up base publisher with pub_base_broker_url by creating a new instance
            of SimplePublisher class with yaml format

            :params: None.

            :return: None.
        """
        self.pub_base_broker_url = "amqp://%s:%s@%s" % (
            self._msg_pub_name, self._msg_pub_passwd,
            str(self._base_broker_addr))
        LOGGER.info('Setting up Base publisher on %s using %s',
                    self.pub_base_broker_url, self._base_msg_format)
        self._publisher = SimplePublisher(self.pub_base_broker_url,
                                          self._base_msg_format)

    def on_aux_foreman_message(self, ch, method, properties, body):
        """ Ack the delivery and dispatch a DMCS message to its handler.

            :params ch: Channel to message broker, unused unless testing.
            :params method: Delivery method from Pika, unused unless testing.
            :params properties: Properties from DMCS to AR Foreman callback message
                                body, unused unless testing.
            :params body: A dictionary that stores the message body.

            :return: None.
        """
        #msg_dict = yaml.load(body)
        ch.basic_ack(method.delivery_tag)
        LOGGER.info('In AUX Foreman message callback')
        LOGGER.info(
            'Message from DMCS to AUX Foreman callback message body is: %s',
            str(body))
        print("Incoming AUX msg is: %s" % body)
        # Route to the handler registered for this message type.
        self._msg_actions.get(body[MSG_TYPE])(body)

    def on_archive_message(self, ch, method, properties, body):
        """ Calls the appropriate AR message action handler according to message type.

            :params ch: Channel to message broker, unused unless testing.
            :params method: Delivery method from Pika, unused unless testing.
            :params properties: Properties from AR CTRL callback message body,
                                unused unless testing.
            :params body: A dictionary that stores the message body.

            :return: None.
        """
        ch.basic_ack(method.delivery_tag)
        LOGGER.info('AR CTRL callback msg body is: %s', str(body))

        # BUG FIX: msg_dict was referenced here without ever being assigned,
        # raising NameError on every archive message. The message body is the
        # dict to dispatch on (matching on_aux_foreman_message).
        msg_dict = body
        handler = self._msg_actions.get(msg_dict[MSG_TYPE])
        result = handler(msg_dict)

    def on_ack_message(self, ch, method, properties, body):
        """ Ack the delivery and dump the incoming ACK message to stdout.

            Handler dispatch for ACK messages is currently disabled (see the
            note below); the message is acknowledged and printed, then the
            callback returns without further processing.

            :params ch: Channel to message broker, unused unless testing.
            :params method: Delivery method from Pika, unused unless testing.
            :params properties: Properties from ACK callback message body, unused
                                unless testing.
            :params body: A dictionary that stores the message body.

            :return: None.
        """
        ch.basic_ack(method.delivery_tag)
        msg_dict = body
        print("")
        print("")
        print("")
        print("RECEIVING ack MESSAGE:")
        print(msg_dict)
        print("")
        print("")
        print("")

        # XXX FIX Ignoring all log messages. The logging and handler-dispatch
        # code that followed this early return was unreachable dead code and
        # has been removed; when ACK handling is re-enabled, dispatch via
        # self._msg_actions.get(msg_dict[MSG_TYPE])(msg_dict).
        return

    def process_at_start_integration(self, params):
        """ Handle an AT start-integration message: health-check forwarders,
            pick a forwarder, and send it the transfer parameters for the
            incoming image.

            Much of the original AR-style flow (job scoreboard, archive
            controller query, work division, final accept ack) is currently
            disabled below via comments and a trailing string literal.

            :params params: Message dict; must contain ACK_ID, IMAGE_ID and
                            IMAGE_INDEX keys.

            :return: None.
        """
        # When this method is invoked, the following must happen:
        #    1) Health check all forwarders
        #    2) Divide work and generate dict of forwarders and which rafts/ccds they are fetching
        #    3) Inform each forwarder which rafts they are responsible for
        # receive new job_number and image_id; session and visit are current
        # and deep copy it with some additions such as session and visit
        # These next three lines must have WFS and Guide sensor info added
        # NOTE(review): start_int_ack_id is captured but unused while the
        # final accept-ack section below is disabled.
        start_int_ack_id = params[ACK_ID]

        print("Incoming AUX AT_Start Int msg")
        # next, run health check
        self.ACK_QUEUE = {}
        health_check_ack_id = self.get_next_timed_ack_id('AUX_FWDR_HEALTH_ACK')
        num_fwdrs_checked = self.fwdr_health_check(health_check_ack_id)

        # Add job scbd entry
        # Fixed wait for health-check acks to arrive (no progressive timer here).
        self.ack_timer(1.4)

        #healthy_fwdrs = self.ACK_QUEUE.get_components_for_timed_ack(health_check_ack_id)
        #if healthy_fwdrs == None:
        #    self.refuse_job(params, "No forwarders available")
        #    ### FIX send error code for this...
        #    return

        # NOTE(review): health-check results are ignored; the first configured
        # forwarder is always chosen regardless of its ack status.
        fwdr_names = list(self._forwarder_dict.keys())
        self._current_fwdr = self._forwarder_dict[fwdr_names[0]]

        # Add archive check when necessary...
        # send new_archive_item msg to archive controller
        #start_int_params = {}
        #ac_timed_ack = self.get_next_timed_ack_id('AUX_CTRL_NEW_ITEM')
        #start_int_params[MSG_TYPE] = 'NEW_ARCHIVE_ITEM'
        #start_int_params['ACK_ID'] = ac_timed_ack
        #start_int_params['JOB_NUM'] = job_number
        #start_int_params['SESSION_ID'] = session_id
        #start_int_params['VISIT_ID'] = visit_id
        #start_int_params['IMAGE_ID'] = image_id
        #start_int_params['REPLY_QUEUE'] = self.AUX_FOREMAN_ACK_PUBLISH
        #self.JOB_SCBD.set_job_state(job_number, 'AR_NEW_ITEM_QUERY')
        #self._publisher.publish_message(self.ARCHIVE_CTRL_CONSUME, start_int_params)

        #ar_response = self.progressive_ack_timer(ac_timed_ack, 1, 2.0)

        #if ar_response == None:
        #   FIXME raise L1 exception and bail out
        #   print("B-B-BAD Trouble; no ar_response")

        #target_dir = ar_response['ARCHIVE_CTRL']['TARGET_DIR']
        # Target dir comes straight from config while the archive-controller
        # query above is disabled.
        target_dir = self.archive_xfer_root
        #self.JOB_SCBD.set_job_params(job_number, {'STATE':'AR_NEW_ITEM_RESPONSE', 'TARGET_DIR': dir})

        # divide image fetch across forwarders
        #list_of_fwdrs = list(healthy_fwdrs.keys())
        #work_schedule = self.divide_work(list_of_fwdrs, raft_list, raft_ccd_list)

        # send target dir, and job, session,visit and work to do to healthy forwarders
        #self.JOB_SCBD.set_value_for_job(job_number, 'STATE','SENDING_XFER_PARAMS')
        #set_sched_result = self.JOB_SCBD.set_work_schedule_for_job(job_number, work_schedule)
        #if set_sched_result == False:
        # FIXME Raise L1 exception and bail
        #    print("BIG PROBLEM - CANNOT SET WORK SCHED IN SCBD")

        xfer_params_ack_id = self.get_next_timed_ack_id("AT_FWDR_PARAMS_ACK")

        # Build the transfer-parameters message for the chosen forwarder.
        fwdr_new_target_params = {}
        fwdr_new_target_params['XFER_PARAMS'] = {}
        fwdr_new_target_params[MSG_TYPE] = 'AT_FWDR_XFER_PARAMS'
        #fwdr_new_target_params[SESSION_ID] = session_id
        fwdr_new_target_params[IMAGE_ID] = params[IMAGE_ID]
        fwdr_new_target_params['IMAGE_INDEX'] = params['IMAGE_INDEX']
        #fwdr_new_target_params[VISIT_ID] = visit_id
        #fwdr_new_target_params[JOB_NUM] = job_number
        fwdr_new_target_params[ACK_ID] = xfer_params_ack_id
        fwdr_new_target_params[REPLY_QUEUE] = self.AT_FOREMAN_ACK_PUBLISH
        # scp-style destination: login@host:directory
        target_location = self.archive_name + "@" + self.archive_ip + ":" + target_dir
        fwdr_new_target_params['TARGET_LOCATION'] = target_location

        xfer_params_dict = {}
        xfer_params_dict['RAFT_LIST'] = self._wfs_raft
        #xfer_params_dict['RAFT_LIST'] = self.RAFT_LIST
        #xfer_params_dict['RAFT_LIST'].append(self.RAFT_LIST)
        #xfer_params_dict['RAFT_CCD_LIST'] = []
        #xfer_params_dict['RAFT_CCD_LIST'].append(self.RAFT_CCD_LIST)
        xfer_params_dict['AT_FWDR'] = self._current_fwdr
        fwdr_new_target_params['XFER_PARAMS'] = xfer_params_dict
        route_key = self._current_fwdr["CONSUME_QUEUE"]
        self._publisher.publish_message(route_key, fwdr_new_target_params)
        """
        # receive ack back from forwarders that they have job params
        params_acks = self.progressive_ack_timer(xfer_params_ack_id, len_fwdrs_list, 3.0)

        ### FIX
        #   if params_acks == None:
        #     raise L1Exception and bail

        self.JOB_SCBD.set_value_for_job(job_number,'STATE','XFER_PARAMS_SENT')

        # accept job by Ach'ing True
        st_int_params_ack = {}
        st_int_params_ack['MSG_TYPE'] = 'AR_START_INTEGRATION_ACK'
        st_int_params_ack['ACK_ID'] = start_int_ack_id
        st_int_params_ack['ACK_BOOL'] = True
        st_int_params_ack['JOB_NUM'] = job_number
        st_int_params_ack['SESSION_ID'] = session_id
        st_int_params_ack['IMAGE_ID'] = image_id
        st_int_params_ack['VISIT_ID'] = visit_id
        st_int_params_ack['COMPONENT'] = self.COMPONENT_NAME
        self.accept_job(st_int_params_ack)

        self.JOB_SCBD.set_value_for_job(job_number, STATE, "JOB_ACCEPTED")
        fscbd_params = {'STATE':'AWAITING_READOUT'}
        self.FWD_SCBD.set_forwarder_params(healthy_fwdrs, fscbd_params)
        """

    def fwdr_health_check(self, ack_id):
        """ Send an AT_FWDR_HEALTH_CHECK message to every configured
            forwarder's consume queue, with at_foreman_ack_publish as the
            reply queue.

            :params ack_id: Ack id the forwarders must echo back.

            :return: Number of health checks sent (one per forwarder).
        """
        msg_params = {}
        msg_params[MSG_TYPE] = 'AT_FWDR_HEALTH_CHECK'
        msg_params[ACK_ID] = ack_id
        msg_params[REPLY_QUEUE] = self.AT_FOREMAN_ACK_PUBLISH

        # Iterate the forwarder entries directly instead of the old
        # index-into-keys() pattern.
        for fwdr in self._forwarder_dict.values():
            self._publisher.publish_message(fwdr["CONSUME_QUEUE"], msg_params)
        return len(self._forwarder_dict)

    def divide_work(self, fwdrs_list, raft_list, raft_ccd_list):
        """ Divide work (rafts and their ccds) among forwarders.

            If only one forwarder is available, give it all the work.
            If there are no more rafts than forwarders, give the first
            len(raft_list) forwarders one raft each.
            Else, evenly distribute rafts among forwarders, give the extras
            to the first forwarder, and keep each forwarder's raft span
            contiguous.

            :params fwdrs_list: List of available forwarders for the job.
            :params raft_list: List of rafts to be distributed.
            :params raft_ccd_list: Per-raft ccd lists, parallel to raft_list.

            :return schedule: Dict with FORWARDER_LIST, RAFT_LIST and
                              RAFT_CCD_LIST describing the distribution.
        """
        num_fwdrs = len(fwdrs_list)
        num_rafts = len(raft_list)

        schedule = {}
        schedule['FORWARDER_LIST'] = []
        # A list of ccd lists; index of main list matches same forwarder list index
        schedule['CCD_LIST'] = []
        FORWARDER_LIST = []
        RAFT_LIST = []  # This is a 'list of lists'
        RAFT_CCD_LIST = []  # This is a 'list of lists'

        if num_fwdrs == 1:
            # Single forwarder gets everything; deep copies so later mutation
            # of the schedule cannot corrupt the caller's lists.
            FORWARDER_LIST.append(fwdrs_list[0])
            RAFT_LIST = deepcopy(raft_list)
            RAFT_CCD_LIST = deepcopy(raft_ccd_list)
            schedule['FORWARDER_LIST'] = FORWARDER_LIST
            schedule['RAFT_LIST'] = RAFT_LIST
            schedule['RAFT_CCD_LIST'] = RAFT_CCD_LIST
            return schedule

        if num_rafts <= num_fwdrs:
            # One raft per forwarder; surplus forwarders get no work.
            for k in range(0, num_rafts):
                FORWARDER_LIST.append(fwdrs_list[k])
                RAFT_LIST.append(raft_list[k])  # Need a copy here...
                # BUG FIX: original read `RAFT_CCD_LIST.append = deepcopy(...)`
                # which assigns to the bound method and raises AttributeError.
                RAFT_CCD_LIST.append(deepcopy(raft_ccd_list[k]))
            schedule['FORWARDER_LIST'] = FORWARDER_LIST
            schedule['RAFT_LIST'] = RAFT_LIST
            schedule['RAFT_CCD_LIST'] = RAFT_CCD_LIST
        else:
            rafts_per_fwdr = len(raft_list) // num_fwdrs
            remainder_rafts = len(raft_list) % num_fwdrs
            offset = 0
            for i in range(0, num_fwdrs):
                tmp_list = []
                tmp_raft_list = []
                for j in range(offset, (rafts_per_fwdr + offset)):
                    if (j) >= num_rafts:
                        break
                    tmp_list.append(raft_list[j])
                    tmp_raft_list.append(deepcopy(raft_ccd_list[j]))
                offset = offset + rafts_per_fwdr

                # If num_fwdrs divided num_rafts evenly we are done; otherwise
                # the first forwarder also takes the leftover rafts.
                if remainder_rafts != 0 and i == 0:
                    for k in range(offset, offset + remainder_rafts):
                        tmp_list.append(raft_list[k])
                        tmp_raft_list.append(deepcopy(raft_ccd_list[k]))
                    offset = offset + remainder_rafts
                FORWARDER_LIST.append(fwdrs_list[i])
                RAFT_LIST.append(list(tmp_list))
                RAFT_CCD_LIST.append(list(tmp_raft_list))
            schedule['FORWARDER_LIST'] = FORWARDER_LIST
            schedule['RAFT_LIST'] = RAFT_LIST
            schedule['RAFT_CCD_LIST'] = RAFT_CCD_LIST

        return schedule

    def accept_job(self, dmcs_message):
        """ Publish a job-acceptance ack (ack_bool True plus job specs) to the
            dmcs_ack_consume queue.

            :params dmcs_message: Dict describing the accepted job.

            :return: None.
        """
        reply_queue = "dmcs_ack_consume"
        self._publisher.publish_message(reply_queue, dmcs_message)

    def refuse_job(self, params, fail_details):
        """ Refuse a job: mark it JOB_REFUSED in the JobScoreboard and publish
            an AR_START_INTEGRATION_ACK with ack_bool False to
            dmcs_ack_consume.

            :params params: Dict with the job's identifying fields.
            :params fail_details: Reason string; currently unused.

            :return: None.
        """
        dmcs_message = {
            JOB_NUM: params[JOB_NUM],
            MSG_TYPE: 'AR_START_INTEGRATION_ACK',
            'ACK_ID': params['ACK_ID'],
            'SESSION_ID': params['SESSION_ID'],
            'VISIT_ID': params['VISIT_ID'],
            'IMAGE_ID': params['IMAGE_ID'],
            ACK_BOOL: False,
            'COMPONENT': self.COMPONENT_NAME,
        }
        self.JOB_SCBD.set_value_for_job(params[JOB_NUM], STATE, "JOB_REFUSED")
        self._publisher.publish_message("dmcs_ack_consume", dmcs_message)

    def process_at_end_readout(self, params):
        """ Forward an AT end-readout notification to the currently selected
            forwarder.

            The job-scoreboard updates and the wait for forwarder readout
            responses are currently disabled (commented out below), so this
            method only publishes AT_FWDR_END_READOUT and returns.

            :params params: Message dict; must contain REPLY_QUEUE, ACK_ID,
                            IMAGE_ID and IMAGE_INDEX keys.

            :return: None.
        """
        print("Incoming AUX AT_END_READOUT msg")
        # NOTE(review): reply_queue and readout_ack_id are unused while the
        # response-processing block at the bottom stays disabled.
        reply_queue = params['REPLY_QUEUE']
        readout_ack_id = params[ACK_ID]
        #job_number = params[JOB_NUM]
        image_id = params[IMAGE_ID]
        # send readout to forwarders
        #self.JOB_SCBD.set_value_for_job(job_number, 'STATE', 'READOUT')
        # NOTE(review): this ack id is generated (incrementing the counter)
        # but never sent anywhere while the timer below is commented out.
        fwdr_readout_ack = self.get_next_timed_ack_id("AR_FWDR_READOUT_ACK")
        #work_schedule = self.JOB_SCBD.get_work_schedule_for_job(job_number)
        current_fwdr = self._current_fwdr
        msg = {}
        msg[MSG_TYPE] = 'AT_FWDR_END_READOUT'
        #msg[JOB_NUM] = job_number
        msg[IMAGE_ID] = image_id
        msg['IMAGE_INDEX'] = params['IMAGE_INDEX']
        route_key = self._current_fwdr['CONSUME_QUEUE']
        self._publisher.publish_message(route_key, msg)

        #readout_responses = self.progressive_ack_timer(fwdr_readout_ack, len(fwdrs), 4.0)

        # if readout_responses == None:
        #    raise L1 exception

        #self.process_readout_responses(readout_ack_id, reply_queue, image_id, readout_responses)

    def process_readout_responses(self, readout_ack_id, reply_queue, image_id,
                                  readout_responses):
        """ Collate per-forwarder readout results (ccd, filename, checksum),
            send them to the archive controller as AR_ITEMS_XFERD for
            verification, then forward the archive's results to the caller as
            an AR_READOUT_ACK on reply_queue.

            :params readout_ack_id: Ack id for the AR_READOUT_ACK message.
            :params reply_queue: Queue the final ack is published to.
            :params image_id: Overwritten below from the responses.
            :params readout_responses: Per-forwarder readout results from the
                                       AckScoreboard.

            :return: None.
        """
        job_number = None
        image_id = None
        confirm_ack = self.get_next_timed_ack_id('AR_ITEMS_XFERD_ACK')
        fwdrs = list(readout_responses.keys())
        CCD_LIST = []
        FILENAME_LIST = []
        CHECKSUM_LIST = []
        for fwdr in fwdrs:
            result_list = readout_responses[fwdr]['RESULT_LIST']
            # zip replaces the old index loop (which also built an unused
            # dict per iteration); lists are expected to be parallel.
            for ccd, fname, csum in zip(result_list['CCD_LIST'],
                                        result_list['FILENAME_LIST'],
                                        result_list['CHECKSUM_LIST']):
                CCD_LIST.append(ccd)
                FILENAME_LIST.append(fname)
                CHECKSUM_LIST.append(csum)
        # NOTE(review): takes JOB_NUM/IMAGE_ID from the *last* forwarder in
        # the dict; assumes all forwarders report the same job and image.
        job_number = readout_responses[fwdr][JOB_NUM]
        image_id = readout_responses[fwdr]['IMAGE_ID']
        xfer_list_msg = {}
        xfer_list_msg[MSG_TYPE] = 'AR_ITEMS_XFERD'
        xfer_list_msg[ACK_ID] = confirm_ack
        xfer_list_msg['IMAGE_ID'] = image_id
        xfer_list_msg['REPLY_QUEUE'] = self.AR_FOREMAN_ACK_PUBLISH
        xfer_list_msg['RESULT_LIST'] = {}
        xfer_list_msg['RESULT_LIST']['CCD_LIST'] = CCD_LIST
        xfer_list_msg['RESULT_LIST']['FILENAME_LIST'] = FILENAME_LIST
        xfer_list_msg['RESULT_LIST']['CHECKSUM_LIST'] = CHECKSUM_LIST
        self._publisher.publish_message(self.ARCHIVE_CTRL_CONSUME,
                                        xfer_list_msg)

        # Wait (up to 4s) for the archive controller's verification results.
        xfer_check_responses = self.progressive_ack_timer(confirm_ack, 1, 4.0)

        # if xfer_check_responses == None:
        #    raise L1 exception and bail

        results = xfer_check_responses['ARCHIVE_CTRL']['RESULT_LIST']

        ack_msg = {}
        ack_msg['MSG_TYPE'] = 'AR_READOUT_ACK'
        ack_msg['JOB_NUM'] = job_number
        ack_msg['COMPONENT'] = self.COMPONENT_NAME
        ack_msg['ACK_ID'] = readout_ack_id
        ack_msg['ACK_BOOL'] = True
        ack_msg['RESULT_LIST'] = results
        self._publisher.publish_message(reply_queue, ack_msg)

        ### FIXME Set state as complete for Job

    def send_readout(self, params, fwdrs, readout_ack):
        """ Publish an AR_FWDR_READOUT message to every forwarder assigned to
            the job, with ar_foreman_ack_publish as the reply queue.

            :params params: Dict holding the job's JOB_NUM and IMAGE_ID.
            :params fwdrs: Forwarders working on the job.
            :params readout_ack: Ack id the forwarders must echo back.

            :return: None.
        """
        ro_params = {
            'MSG_TYPE': 'AR_FWDR_READOUT',
            'JOB_NUM': params['JOB_NUM'],
            'SESSION_ID': self.get_current_session(),
            'VISIT_ID': self.get_current_visit(),
            'IMAGE_ID': params['IMAGE_ID'],
            'ACK_ID': readout_ack,
            'REPLY_QUEUE': self.AR_FOREMAN_ACK_PUBLISH,
        }
        for fwdr in fwdrs:
            consume_queue = self.FWD_SCBD.get_value_for_forwarder(
                fwdr, "CONSUME_QUEUE")
            self._publisher.publish_message(consume_queue, ro_params)

    def process_header_ready_event(self, params):
        """ Relay a header-ready notification to the forwarder as an
            AT_FWDR_HEADER_READY message.

            :params params: Dict with FILENAME and IMAGE_ID keys.

            :return: None.
        """
        msg = {
            'MSG_TYPE': 'AT_FWDR_HEADER_READY',
            'FILENAME': params['FILENAME'],
            'IMAGE_ID': params['IMAGE_ID'],
        }
        # XXX FIX remove hard-coded queue; should presumably come from
        # self._current_fwdr['CONSUME_QUEUE'] (kept as-is to preserve behavior).
        self._publisher.publish_message("f99_consume", msg)

    def take_images_done(self, params):
        """ Mark the job TAKE_IMAGES_DONE in the JobScoreboard and notify each
            forwarder on the job's work schedule with an
            AR_FWDR_TAKE_IMAGES_DONE message.

            :params params: Dict with REPLY_QUEUE, ACK_ID and JOB_NUM keys.

            :return: None.
        """
        # NOTE(review): reply_queue and readout_ack_id are captured but unused
        # -- see the FIX note at the bottom about the missing final response.
        reply_queue = params['REPLY_QUEUE']
        readout_ack_id = params[ACK_ID]
        job_number = params[JOB_NUM]
        self.JOB_SCBD.set_value_for_job(job_number, 'STATE',
                                        'TAKE_IMAGES_DONE')
        fwdr_readout_ack = self.get_next_timed_ack_id(
            "AR_FWDR_TAKE_IMAGES_DONE_ACK")
        work_schedule = self.JOB_SCBD.get_work_schedule_for_job(job_number)
        fwdrs = work_schedule['FORWARDER_LIST']
        len_fwdrs = len(fwdrs)
        msg = {}
        msg[MSG_TYPE] = 'AR_FWDR_TAKE_IMAGES_DONE'
        msg[JOB_NUM] = job_number
        msg[ACK_ID] = fwdr_readout_ack
        for i in range(0, len_fwdrs):
            # NOTE(review): other methods use self.FWD_SCBD; confirm
            # self.FWDR_SCBD is the intended attribute here.
            route_key = self.FWDR_SCBD.get_value_for_forwarder(
                fwdrs[i], 'CONSUME_QUEUE')
            self._publisher.publish_message(route_key, msg)

        ### FIX Add Final Response to DMCS

    def process_ack(self, params):
        """ Add new ACKS for a particular ACK_ID to the Ack Scoreboards
            where they are collated.

            NOTE(review): currently a no-op -- the scoreboard check-in below
            is commented out, so incoming acks are silently dropped.

            :params: New ack to be checked in.

            :return: None.
        """
        pass
        #self.ACK_SCBD.add_timed_ack(params)

    def get_next_timed_ack_id(self, ack_type):
        """ Advance the monotonically increasing ack counter and build the
            next ack id string.

            :params ack_type: Informational prefix for the ack id.

            :return: '<ack_type>_NNNNNN' with the counter zero-padded to six
                     digits.
        """
        self._next_timed_ack_id += 1
        return f"{ack_type}_{self._next_timed_ack_id:06d}"

    def set_session(self, params):
        # NOTE(review): this handler is deliberately disabled -- the body is
        # `pass` followed by an unassigned (dead) string literal holding the
        # old implementation, so no session is recorded and no ack is sent.
        pass
        """ Record new session in JobScoreboard.
            Send AR_NEW_SESSION_ACK message with ack_bool equals True to specified reply queue.

            :params params: Dictionary with info about new session.

            :return: None.
        self.JOB_SCBD.set_session(params['SESSION_ID'])
        ack_id = params['ACK_ID']
        msg = {}
        msg['MSG_TYPE'] = 'AR_NEW_SESSION_ACK'
        msg['COMPONENT'] = self.COMPONENT_NAME
        msg['ACK_ID'] = ack_id
        msg['ACK_BOOL'] = True
        route_key = params['REPLY_QUEUE'] 
        self._publisher.publish_message(route_key, msg)
        """

    def get_current_session(self):
        """ Retrieve the current session from the JobScoreboard.

            :params: None.

            :return: The JobScoreboard's current session.
        """
        scoreboard = self.JOB_SCBD
        return scoreboard.get_current_session()

    def set_visit(self, params):
        """ Record the new visit id (with its bore sight) in the
            JobScoreboard, then ack with AR_NEXT_VISIT_ACK (ack_bool True) on
            the caller's reply queue.

            :params params: Message dict with VISIT_ID, BORE_SIGHT, ACK_ID and
                            REPLY_QUEUE keys.

            :return: None.
        """
        bore_sight = params['BORE_SIGHT']
        self.JOB_SCBD.set_visit_id(params['VISIT_ID'], bore_sight)
        ## XXX FIXME Do something with the bore sight in params['BORE_SIGHT']
        ack = {
            'MSG_TYPE': 'AR_NEXT_VISIT_ACK',
            'COMPONENT': self.COMPONENT_NAME,
            'ACK_ID': params['ACK_ID'],
            'ACK_BOOL': True,
        }
        self._publisher.publish_message(params['REPLY_QUEUE'], ack)

    def get_current_visit(self):
        """ Retrieve the current visit from the JobScoreboard.

            :params: None.

            :return: The JobScoreboard's current visit.
        """
        scoreboard = self.JOB_SCBD
        return scoreboard.get_current_visit()

    def ack_timer(self, seconds):
        """ Block the calling thread for the given duration.

            :params seconds: How long to sleep, in seconds.

            :return: Always True, so callers can use it in conditions.
        """
        wait_time = seconds
        sleep(wait_time)
        return True

    def progressive_ack_timer(self, ack_id, expected_replies, seconds):
        """ Poll the AckScoreboard every 0.5s for up to `seconds`, returning
            early as soon as all expected components have acked.

            :params ack_id: Ack ID to wait for.

            :params expected_replies: Number of components expected to ack.

            :params seconds: Maximum time to wait in seconds.

            :return: The dict of responses from the acking components, or
                     None if the expected count never arrived in time.
        """
        counter = 0.0
        while counter < seconds:
            counter += 0.5
            sleep(0.5)
            response = self.ACK_SCBD.get_components_for_timed_ack(ack_id)
            # `is None` instead of `== None`; len(response) instead of
            # len(list(response.keys())).
            if response is not None and len(response) == expected_replies:
                return response

        # One final check after the timeout window closes.
        response = self.ACK_SCBD.get_components_for_timed_ack(ack_id)
        if response is not None and len(response) == expected_replies:
            return response
        return None

    def extract_config_values(self):
        """ Parse the system config YAML file and cache the values this
            device needs.

            Exits the process on failure: 101 when the file cannot be read,
            99 when a required key is missing.

            :params: None.

            :return: None.
        """
        # Lazy %-style args instead of eager string interpolation.
        LOGGER.info('Reading YAML Config file %s', self._config_file)
        try:
            cdm = toolsmod.intake_yaml_file(self._config_file)
        except IOError as e:
            # Include the underlying error instead of discarding it.
            LOGGER.critical("Unable to find CFG Yaml file %s: %s",
                            self._config_file, e)
            sys.exit(101)

        try:
            # Message broker user & passwd
            self._msg_name = cdm[ROOT]['AUX_BROKER_NAME']
            self._msg_passwd = cdm[ROOT]['AUX_BROKER_PASSWD']
            self._msg_pub_name = cdm[ROOT]['AUX_BROKER_PUB_NAME']
            self._msg_pub_passwd = cdm[ROOT]['AUX_BROKER_PUB_PASSWD']
            self._base_broker_addr = cdm[ROOT][BASE_BROKER_ADDR]
            self._forwarder_dict = cdm[ROOT][XFER_COMPONENTS]['AUX_FORWARDERS']
            self._wfs_raft = cdm[ROOT]['ATS']['WFS_RAFT']

            # Placeholder until eventually worked out by Data Backbone team
            self.archive_fqn = cdm[ROOT]['ARCHIVE']['ARCHIVE_NAME']
            self.archive_name = cdm[ROOT]['ARCHIVE']['ARCHIVE_LOGIN']
            self.archive_ip = cdm[ROOT]['ARCHIVE']['ARCHIVE_IP']
            self.archive_xfer_root = cdm[ROOT]['ARCHIVE']['ARCHIVE_XFER_ROOT']
        except KeyError as e:
            # Report which key was missing instead of a bare "Dictionary error".
            print("Dictionary error: missing key %s" % e)
            print("Bailing out...")
            sys.exit(99)

        # Default message format unless the config overrides it.
        self._base_msg_format = 'YAML'
        if 'BASE_MSG_FORMAT' in cdm[ROOT]:
            self._base_msg_format = cdm[ROOT]['BASE_MSG_FORMAT']

    def setup_consumer_threads(self):
        """ Create a ThreadManager running one message consumer per queue
            this device listens on (foreman consume, foreman ack publish,
            archive ctrl publish).

            :params: None.

            :return: None.
        """
        base_broker_url = "amqp://" + self._msg_name + ":" + \
                                            self._msg_passwd + "@" + \
                                            str(self._base_broker_addr)
        LOGGER.info('Building _base_broker_url. Result is %s', base_broker_url)

        self.shutdown_event = threading.Event()
        self.shutdown_event.clear()

        # Set up kwargs that describe consumers to be started.
        # One (name, queue, callback, test_val) tuple per consumer replaces
        # the three previously hand-copied, near-identical dict blocks.
        consumer_specs = [
            ('Thread-aux_foreman_consume', 'at_foreman_consume',
             self.on_aux_foreman_message, None),
            ('Thread-at_foreman_ack_publish', 'at_foreman_ack_publish',
             self.on_ack_message, 'test_it'),
            ('Thread-archive_ctrl_publish', 'archive_ctrl_publish',
             self.on_archive_message, 'test_it'),
        ]
        kws = {}
        for name, queue, callback, test_val in consumer_specs:
            kws[name] = {
                'amqp_url': base_broker_url,
                'name': name,
                'queue': queue,
                'callback': callback,
                'format': 'YAML',
                'test_val': test_val,
            }

        self.thread_manager = ThreadManager('thread-manager', kws,
                                            self.shutdown_event)
        self.thread_manager.start()

    def shutdown(self):
        """ Signal all consumer threads to stop and terminate the process.

            :params: None.

            :return: Does not return; exits the process via os._exit(0).
        """
        LOGGER.info("Shutting down Consumer threads.")
        self.shutdown_event.set()
        LOGGER.debug("Thread Manager shutting down and app exiting...")
        print("\n")
        # os._exit skips interpreter cleanup (atexit/finally); used here so
        # the process dies even if a consumer thread is stuck.
        os._exit(0)
class ArchiveController:
    """ Manages the archive staging area for the foreman devices: answers
        health checks, creates target directories for new archive items, and
        verifies transferred files (existence and, when enabled, MD5
        checksum), issuing receipt numbers for good transfers.
    """

    ARCHIVE_CTRL_PUBLISH = "archive_ctrl_publish"
    ARCHIVE_CTRL_CONSUME = "archive_ctrl_consume"
    ACK_PUBLISH = "ar_foreman_ack_publish"
    AUDIT_CONSUME = "audit_consume"
    YAML = 'YAML'
    # Persistent YAML file holding the last receipt id handed out.
    RECEIPT_FILE = "/var/archive/archive_controller_receipt"

    def __init__(self, filename=None):
        """ Read broker credentials and archive settings from the config
            file, then start the consumer thread and the publisher.

            :params filename: Optional config file path; defaults to
                              'L1SystemCfg.yaml'.

            :raises L1Error: If a required config key is missing.
        """
        self._session_id = None
        self._name = "ARCHIVE_CTRL"
        self._config_file = 'L1SystemCfg.yaml'
        if filename is not None:
            self._config_file = filename

        cdm = toolsmod.intake_yaml_file(self._config_file)

        try:
            # Message broker user/passwd for this component
            self._archive_name = cdm[ROOT]['ARCHIVE_BROKER_NAME']
            self._archive_passwd = cdm[ROOT]['ARCHIVE_BROKER_PASSWD']
            self._base_broker_addr = cdm[ROOT][BASE_BROKER_ADDR]
            self._archive_xfer_root = cdm[ROOT]['ARCHIVE']['ARCHIVE_XFER_ROOT']
            # Config stores the flag as the string 'yes'/'no'.
            self.CHECKSUM_ENABLED = \
                cdm[ROOT]['ARCHIVE']['CHECKSUM_ENABLED'] == 'yes'
        except KeyError as e:
            raise L1Error(e)

        self._base_msg_format = self.YAML

        if 'BASE_MSG_FORMAT' in cdm[ROOT]:
            self._base_msg_format = cdm[ROOT][BASE_MSG_FORMAT]

        self._base_broker_url = "amqp://" + self._archive_name + ":" + \
            self._archive_passwd + "@" + str(self._base_broker_addr)

        LOGGER.info(
            'Building _base_broker_url connection string for Archive Controller. Result is %s',
            self._base_broker_url)

        # Dispatch table: incoming MSG_TYPE -> handler method.
        self._msg_actions = {
            'ARCHIVE_HEALTH_CHECK': self.process_health_check,
            'NEW_ARCHIVE_ITEM': self.process_new_archive_item,
            'AR_ITEMS_XFERD': self.process_transfer_complete
        }

        self.setup_consumer()
        self.setup_publisher()

    def setup_consumer(self):
        """ Start a ThreadManager with one consumer on the
            archive_ctrl_consume queue, dispatching to on_archive_message.
        """
        LOGGER.info('Setting up archive consumers on %s',
                    self._base_broker_url)
        LOGGER.info('Running start_new_thread for archive consumer')

        self.shutdown_event = threading.Event()
        self.shutdown_event.clear()

        kws = {}
        md = {}
        md['amqp_url'] = self._base_broker_url
        md['name'] = 'Thread-ar_ctrl_consume'
        md['queue'] = self.ARCHIVE_CTRL_CONSUME
        md['callback'] = self.on_archive_message
        md['format'] = "YAML"
        md['test_val'] = None
        kws[md['name']] = md

        self.thread_manager = ThreadManager('thread-manager', kws,
                                            self.shutdown_event)
        self.thread_manager.start()

    def setup_publisher(self):
        """ Create the SimplePublisher used for acks and audit messages. """
        LOGGER.info('Setting up Archive publisher on %s using %s',
                    self._base_broker_url, self._base_msg_format)
        self._archive_publisher = SimplePublisher(self._base_broker_url,
                                                  self._base_msg_format)
        #self._audit_publisher = SimplePublisher(self._base_broker_url, self._base_msg_format)

    def on_archive_message(self, ch, method, properties, msg_dict):
        """ Consumer callback: ack the delivery, then dispatch the message to
            the handler registered for its MSG_TYPE.
        """
        LOGGER.info('Message from Archive callback message body is: %s',
                    str(msg_dict))
        ch.basic_ack(method.delivery_tag)
        handler = self._msg_actions.get(msg_dict[MSG_TYPE])
        result = handler(msg_dict)

    def process_health_check(self, params):
        """Input 'params' for this method is a dict made up of:
           :param str 'MESSAGE_TYPE' value  is 'ARCHIVE_HEALTH_CHECK'
           :param str 'ACK_ID' value  is an alphanumeric string, with
               the numeric part a monotonically increasing sequence.
               This value is passed back to the foreman and used to keep
               track of acknowledgement time.
           :param str 'SESSION_ID' Might be useful for the controller to
               generate a target location for new items to be archived?
        """
        self.send_audit_message("received_", params)
        self.send_health_ack_response("ARCHIVE_HEALTH_CHECK_ACK", params)

    def process_new_archive_item(self, params):
        """ Create a target directory for the new item and ack with its path. """
        self.send_audit_message("received_", params)
        target_dir = self.construct_send_target_dir(params)
        self.send_new_item_ack(target_dir, params)

    def process_transfer_complete(self, params):
        """ Verify each transferred file listed in the message and ack back a
            parallel RECEIPT_LIST: '-1' missing, '0' checksum mismatch,
            otherwise a receipt number.
        """
        transfer_results = {}
        ccds = params['RESULT_LIST']['CCD_LIST']
        fnames = params['RESULT_LIST']['FILENAME_LIST']
        csums = params['RESULT_LIST']['CHECKSUM_LIST']
        RECEIPT_LIST = []
        for pathway, csum in zip(fnames, csums):
            transfer_result = self.check_transferred_file(pathway, csum)
            # `is None` instead of `== None`.
            if transfer_result is None:
                RECEIPT_LIST.append('0')
            else:
                RECEIPT_LIST.append(transfer_result)
        transfer_results['CCD_LIST'] = ccds
        transfer_results['RECEIPT_LIST'] = RECEIPT_LIST
        self.send_transfer_complete_ack(transfer_results, params)

    def check_transferred_file(self, pathway, csum):
        """ Verify a transferred file exists and, if checksumming is enabled,
            matches the reported MD5 digest.

            :params pathway: Path of the transferred file.
            :params csum: MD5 hex digest reported by the forwarder.

            :return: '-1' if the file is missing, '0' on checksum mismatch,
                     otherwise a new receipt number.
        """
        if not os.path.isfile(pathway):
            return '-1'

        if self.CHECKSUM_ENABLED:
            # BUG FIX: read in binary mode -- hashlib.md5() rejects str, so
            # the old text-mode read raised TypeError on every check.
            with open(pathway, 'rb') as file_to_calc:
                data = file_to_calc.read()
            if hashlib.md5(data).hexdigest() != csum:
                return '0'

        return self.next_receipt_number()

    def next_receipt_number(self):
        """ Read the last receipt id from RECEIPT_FILE, increment it, persist
            the new value, and return it.
        """
        last_receipt = toolsmod.intake_yaml_file(self.RECEIPT_FILE)
        current_receipt = int(last_receipt['RECEIPT_ID']) + 1
        session_dict = {}
        session_dict['RECEIPT_ID'] = current_receipt
        toolsmod.export_yaml_file(self.RECEIPT_FILE, session_dict)
        return current_receipt

    def send_health_ack_response(self, type, params):
        """ Publish a health-check ack to the ack queue.

            `type` shadows the builtin but is kept for interface
            compatibility.

            :raises L1MessageError: If ACK_ID or SESSION_ID is missing.
        """
        # BUG FIX: the old try/except never fired (dict.get does not raise,
        # and ack_id could be unbound in the handler); validate explicitly.
        ack_id = params.get("ACK_ID")
        if ack_id is None:
            LOGGER.info('%s failed, missing ACK_ID field', type)
            raise L1MessageError(
                "Missing ACK_ID message param needed for send_ack_response")
        session_id = params.get("SESSION_ID")
        if session_id is None:
            LOGGER.info('%s failed, missing SESSION_ID field', type)
            raise L1MessageError(
                "Missing SESSION_ID param needed for send_ack_response")
        self._current_session_id = session_id

        msg_params = {}
        msg_params[MSG_TYPE] = type
        msg_params[COMPONENT] = self._name
        msg_params[ACK_BOOL] = "TRUE"
        msg_params['ACK_ID'] = ack_id
        LOGGER.info('%s sent for ACK ID: %s', type, ack_id)
        self._archive_publisher.publish_message(self.ACK_PUBLISH, msg_params)

    def send_audit_message(self, prefix, params):
        """ Publish an audit record for the received message to the audit
            queue.
        """
        audit_params = {}
        audit_params['SUB_TYPE'] = str(prefix) + str(
            params['MSG_TYPE']) + "_msg"
        audit_params['DATA_TYPE'] = self._name
        audit_params['TIME'] = get_epoch_timestamp()
        self._archive_publisher.publish_message(self.AUDIT_CONSUME,
                                                audit_params)

    def construct_send_target_dir(self, params):
        """ Build (and create if needed) the per-visit/per-image staging
            directory under the archive transfer root; return its path.
        """
        #session = params['SESSION_ID']
        visit = params['VISIT_ID']
        image = params['IMAGE_ID']
        ack_id = params['ACK_ID']
        #target_dir = self._archive_xfer_root + "_" + str(image_type) + "_" + str(session) + "_" + str(visit) + "_" + str(image)
        target_dir_visit = self._archive_xfer_root + visit + "/"
        target_dir_image = self._archive_xfer_root + visit + "/" + str(
            image) + "/"

        if not os.path.isdir(target_dir_visit):
            os.mkdir(target_dir_visit, 0o766)

        if not os.path.isdir(target_dir_image):
            os.mkdir(target_dir_image, 0o766)

        return target_dir_image

    def send_new_item_ack(self, target_dir, params):
        """ Ack a NEW_ARCHIVE_ITEM request with the prepared target dir. """
        ack_params = {}
        ack_params[MSG_TYPE] = 'NEW_ARCHIVE_ITEM_ACK'
        ack_params['TARGET_DIR'] = target_dir
        ack_params['ACK_ID'] = params['ACK_ID']
        ack_params['JOB_NUM'] = params['JOB_NUM']
        ack_params['IMAGE_ID'] = params['IMAGE_ID']
        ack_params['COMPONENT'] = self._name
        ack_params['ACK_BOOL'] = True
        self._archive_publisher.publish_message(self.ACK_PUBLISH, ack_params)

    def send_transfer_complete_ack(self, transfer_results, params):
        """ Ack an AR_ITEMS_XFERD request, echoing the request fields and
            attaching the per-file verification RESULTS.
        """
        ack_params = {}
        keez = list(params.keys())
        for kee in keez:
            if kee == 'MSG_TYPE' or kee == 'CCD_LIST':
                continue
            ### XXX FIXME Dump loop and just pull the correct values from the input params
            ack_params[kee] = params[kee]

        ack_params['MSG_TYPE'] = 'AR_ITEMS_XFERD_ACK'
        ack_params['COMPONENT'] = self._name
        ack_params['ACK_ID'] = params['ACK_ID']
        ack_params['ACK_BOOL'] = True
        ack_params['RESULTS'] = transfer_results

        self._archive_publisher.publish_message(self.ACK_PUBLISH, ack_params)
# Exemple #11
# 0
class PromptProcessDevice:
    """Foreman device coordinating Prompt Process forwarders with the NCSA foreman."""

    # Scoreboard handles; real scoreboard objects are attached in setup_scoreboards().
    PP_JOB_SCBD = None
    PP_FWD_SCBD = None
    PP_ACK_SCBD = None
    COMPONENT_NAME = 'PROMPT_PROCESS_FOREMAN'
    # Queue / routing-key names used by this device
    PP_FOREMAN_CONSUME = "pp_foreman_consume"
    PP_FOREMAN_ACK_PUBLISH = "pp_foreman_ack_publish"
    PP_START_INTEGRATION_ACK = "PP_START_INTEGRATION_ACK"
    NCSA_PUBLISH = "ncsa_publish"
    NCSA_CONSUME = "ncsa_consume"
    # Fault codes published to the DMCS when a peer does not respond
    NCSA_NO_RESPONSE = 5705
    FORWARDER_NO_RESPONSE = 5605
    FORWARDER_PUBLISH = "forwarder_publish"
    CFG_FILE = 'L1SystemCfg.yaml'
    # Added to the per-site exit codes used in sys.exit() calls below
    ERROR_CODE_PREFIX = 5500
    prp = toolsmod.prp  # pretty-printer shared from toolsmod

    def __init__(self, filename=None):
        """Configure the device and start publishers, scoreboards and consumers.

        :param filename: optional YAML config path; falls back to CFG_FILE.
        """
        toolsmod.singleton(self)

        self._config_file = self.CFG_FILE
        if filename is not None:
            self._config_file = filename

        LOGGER.info('Extracting values from Config dictionary')
        try:
            self.extract_config_values()
        except Exception as e:
            # BUG FIX: exceptions have no .arg attribute, and the class
            # constant is ERROR_CODE_PREFIX (self.ErrorCodePrefix raised
            # AttributeError inside this handler).
            LOGGER.error("PP_Device problem configuring with file %s: %s" %
                         (self._config_file, e))
            print("PP_Device unable to read Config file %s: %s" %
                  (self._config_file, e))
            sys.exit(self.ERROR_CODE_PREFIX + 20)

        #self.purge_broker(cdm['ROOT']['QUEUE_PURGES'])

        # Dispatch table: incoming MSG_TYPE -> handler method.
        self._msg_actions = {
            'PP_NEW_SESSION': self.set_session,
            'PP_NEXT_VISIT': self.set_visit,
            'PP_START_INTEGRATION': self.process_start_integration,
            'PP_READOUT': self.process_dmcs_readout,
            'NCSA_RESOURCE_QUERY_ACK': self.process_ack,
            'NCSA_START_INTEGRATION_ACK': self.process_ack,
            'NCSA_READOUT_ACK': self.process_ack,
            'PP_FWDR_HEALTH_CHECK_ACK': self.process_ack,
            'PP_FWDR_XFER_PARAMS_ACK': self.process_ack,
            'PP_FWDR_READOUT_ACK': self.process_ack,
            'PENDING_ACK': self.process_pending_ack,
            'NCSA_NEXT_VISIT_ACK': self.process_ack
        }

        self._next_timed_ack_id = 0

        try:
            self.setup_publishers()
        except L1PublisherError as e:
            LOGGER.error("PP_Device unable to start Publishers: %s" % e)
            print("PP_Device unable to start Publishers: %s" % e)
            sys.exit(self.ERROR_CODE_PREFIX + 31)

        self.setup_scoreboards()

        LOGGER.info('pp foreman consumer setup')
        self.thread_manager = None
        try:
            self.setup_consumer_threads()
        except L1Exception as e:
            LOGGER.error("PP_Device unable to launch ThreadManager: %s" % e)
            print("PP_Device unable to launch ThreadManager: %s" % e)
            sys.exit(self.ERROR_CODE_PREFIX + 1)

        LOGGER.info('Prompt Process Foreman Init complete')

    def setup_publishers(self):
        """Create the base (DMCS-side) and NCSA-side SimplePublisher objects.

        Raises L1PublisherError when either publisher cannot be created.
        """
        self._pub_base_broker_url = "amqp://" + self._pub_name + ":" + \
                                                self._pub_passwd + "@" + \
                                                str(self._base_broker_addr)

        self._pub_ncsa_broker_url = "amqp://" + self._pub_ncsa_name + ":" + \
                                                self._pub_ncsa_passwd + "@" + \
                                                str(self._ncsa_broker_addr)

        try:
            LOGGER.info('Setting up Base publisher on %s using %s', \
                         self._pub_base_broker_url, self._base_msg_format)
            self._base_publisher = SimplePublisher(self._pub_base_broker_url,
                                                   self._base_msg_format)

            LOGGER.info('Setting up NCSA publisher on %s using %s', \
                         self._pub_ncsa_broker_url, self._ncsa_msg_format)
            self._ncsa_publisher = SimplePublisher(self._pub_ncsa_broker_url,
                                                   self._ncsa_msg_format)
        except Exception as e:
            # BUG FIX: exceptions have no .arg attribute; format the exception.
            LOGGER.error("PP_Device unable to start Publishers: %s" % e)
            print("PP_Device unable to start Publishers: %s" % e)
            raise L1PublisherError(
                "Critical Error: Unable to create Publishers: %s" % e)

    def on_dmcs_message(self, ch, method, properties, body):
        """AMQP callback for DMCS messages; dispatch on the body's MSG_TYPE.

        The body arrives already deserialized as a dict.
        """
        ch.basic_ack(method.delivery_tag)
        msg_dict = body
        LOGGER.info('In DMCS message callback')
        LOGGER.info('Message from DMCS callback message body is: %s',
                    str(msg_dict))

        # NOTE(review): an unknown MSG_TYPE makes handler None and raises
        # TypeError here, matching the original behavior.
        handler = self._msg_actions.get(msg_dict[MSG_TYPE])
        handler(msg_dict)

    def on_forwarder_message(self, ch, method, properties, body):
        """AMQP callback for forwarder messages; currently log-only."""
        ch.basic_ack(method.delivery_tag)
        LOGGER.info('In Forwarder message callback, thread is %s',
                    _thread.get_ident())
        LOGGER.info('forwarder callback msg body is: %s', str(body))

    def on_ncsa_message(self, ch, method, properties, body):
        """AMQP callback for NCSA foreman messages; dispatch on MSG_TYPE."""
        ch.basic_ack(method.delivery_tag)
        msg_dict = body
        LOGGER.info('ncsa msg callback body is: %s', str(msg_dict))

        handler = self._msg_actions.get(msg_dict[MSG_TYPE])
        handler(msg_dict)

    def on_ack_message(self, ch, method, properties, body):
        """AMQP callback for ack messages; dispatch on MSG_TYPE."""
        ch.basic_ack(method.delivery_tag)
        msg_dict = body
        LOGGER.info('In ACK message callback')
        LOGGER.info('Message from ACK callback message body is: %s',
                    str(msg_dict))

        handler = self._msg_actions.get(msg_dict[MSG_TYPE])
        handler(msg_dict)

    def set_session(self, params):
        """Record the new session id and ack the request on its reply queue."""
        self.JOB_SCBD.set_session(params['SESSION_ID'])
        reply = {
            'MSG_TYPE': 'PP_NEW_SESSION_ACK',
            'COMPONENT': self.COMPONENT_NAME,
            'ACK_ID': params['ACK_ID'],
            'ACK_BOOL': True,
        }
        self._base_publisher.publish_message(params['REPLY_QUEUE'], reply)

    def set_visit(self, params):
        """Record the visit/boresight, notify NCSA, then ack the DMCS request."""
        bore_sight = params['BORE_SIGHT']
        visit_id = params['VISIT_ID']
        self.JOB_SCBD.set_visit_id(visit_id, bore_sight)

        # Fire-and-forget: the NCSA reply is tracked as a pending
        # non-blocking ack (the helper returns None; the previous
        # 'ncsa_result' assignment was dead).
        self.send_visit_boresight_to_ncsa(visit_id, bore_sight)

        msg = {
            'MSG_TYPE': 'PP_NEXT_VISIT_ACK',
            'COMPONENT': self.COMPONENT_NAME,
            'ACK_ID': params['ACK_ID'],
            'ACK_BOOL': True,
        }
        self._base_publisher.publish_message(params['REPLY_QUEUE'], msg)

    def send_visit_boresight_to_ncsa(self, visit_id, bore_sight):
        """Forward the next-visit boresight to NCSA and register a pending ack."""
        ack_id = self.get_next_timed_ack_id('NCSA_NEXT_VISIT_ACK')
        msg = {
            'MSG_TYPE': 'NCSA_NEXT_VISIT',
            'VISIT_ID': visit_id,
            'BORE_SIGHT': bore_sight,
            'SESSION_ID': self.JOB_SCBD.get_current_session(),
            'ACK_ID': ack_id,
            'REPLY_QUEUE': self.PP_FOREMAN_ACK_PUBLISH,
        }
        self._ncsa_publisher.publish_message(self.NCSA_CONSUME, msg)

        # Non-blocking: give NCSA four seconds before the ack expires.
        self.set_pending_nonblock_acks([ack_id], 4)

    def process_start_integration(self, input_params):
        """
        Add job to Job Scoreboard
        Check forwarder health
        Check Policy, bail if necessary
        Mark Forwarder scoreboard as a result of above
        Divide work and assemble as a forwarder dictionary for NCSA
        Send work division to NCSA
        Check Policy, bail if necessary
        Persist pairings to Job Scoreboard
        Send params to Forwarders
        Confirm Forwarder Acks
        Send confirm to DMCS
        """

        ccd_list = input_params['CCD_LIST']
        job_num = str(input_params[JOB_NUM])
        visit_id = input_params['VISIT_ID']
        image_id = input_params['IMAGE_ID']
        self.JOB_SCBD.add_job(job_num, image_id, visit_id, ccd_list)

        unknown_status = {"STATUS": "UNKNOWN", "STATE": "UNRESPONSIVE"}
        self.FWD_SCBD.setall_forwarder_params(unknown_status)

        ack_id = self.forwarder_health_check(input_params)

        self.ack_timer(2.5)
        healthy_forwarders = self.ACK_SCBD.get_components_for_timed_ack(ack_id)

        if healthy_forwarders is None:
            # BUG FIX: these calls referenced an undefined 'job_number'
            # variable; the job id in scope is job_num.
            self.JOB_SCBD.set_job_state(job_num, 'SCRUBBED')
            self.JOB_SCBD.set_job_status(job_num, 'INACTIVE')
            self.send_fault("No Response From Forwarders",
                            self.FORWARDER_NO_RESPONSE, job_num,
                            self.COMPONENT_NAME)
            raise L1ForwarderError(
                "No response from any Forwarder when sending job params")

        healthy_forwarders_list = list(healthy_forwarders.keys())
        for forwarder in healthy_forwarders_list:
            self.FWD_SCBD.set_forwarder_state(forwarder, 'BUSY')
            self.FWD_SCBD.set_forwarder_status(forwarder, 'HEALTHY')

        ready_status = {"STATUS": "HEALTHY", "STATE": "READY_WITHOUT_PARAMS"}
        self.FWD_SCBD.set_forwarder_params(healthy_forwarders_list,
                                           ready_status)

        work_schedule = self.divide_work(healthy_forwarders_list, ccd_list)

        ack_id = self.ncsa_resources_query(input_params, work_schedule)

        ncsa_response = self.progressive_ack_timer(ack_id, 1, 2.0)

        # Check ACK scoreboard for response from NCSA
        if ncsa_response:
            pairs = ncsa_response['NCSA_FOREMAN']['PAIRS']

            # Distribute job params and tell DMCS we are ready.
            fwd_ack_id = self.distribute_job_params(input_params, pairs)
            num_fwdrs = len(pairs)
            fwdr_params_response = self.progressive_ack_timer(
                fwd_ack_id, num_fwdrs, 3.0)

            if fwdr_params_response:
                self.JOB_SCBD.set_value_for_job(job_num, "STATE",
                                                "FWDR_PARAMS_RECEIVED")
                in_ready_state = {'STATE': 'READY_WITH_PARAMS'}
                self.FWD_SCBD.set_forwarder_params(healthy_forwarders_list,
                                                   in_ready_state)
                # Tell DMCS we are ready
                return self.accept_job(input_params['ACK_ID'], job_num)

            # Forwarders never confirmed their params: reset them to IDLE.
            # BUG FIX: 'needed_forwarders' was never defined; the forwarders
            # being reset are the healthy ones selected above.
            idle_params = {'STATE': 'IDLE'}
            self.FWD_SCBD.set_forwarder_params(healthy_forwarders_list,
                                               idle_params)
            self.send_fault("No RESPONSE FROM NCSA FOREMAN",
                            self.NCSA_NO_RESPONSE, job_num,
                            self.COMPONENT_NAME)
            raise L1NcsaForemanError("No Response From NCSA Foreman")

        # NCSA itself never answered the resource query.
        result = self.ncsa_no_response(input_params)
        idle_params = {'STATE': 'IDLE'}
        self.FWD_SCBD.set_forwarder_params(healthy_forwarders_list,
                                           idle_params)
        return result

    def forwarder_health_check(self, params):
        """Broadcast a health-check message to every known forwarder.

        :return: the timed ack id the forwarders must reply with.
        """
        ack_id = self.get_next_timed_ack_id("PP_FWDR_HEALTH_CHECK_ACK")
        job_num = params[JOB_NUM]

        # Same health-check payload goes to every forwarder.
        msg = {}
        msg[MSG_TYPE] = 'PP_FWDR_HEALTH_CHECK'
        msg['ACK_ID'] = ack_id
        msg['REPLY_QUEUE'] = self.PP_FOREMAN_ACK_PUBLISH
        msg[JOB_NUM] = job_num

        self.JOB_SCBD.set_value_for_job(job_num, "STATE", "HEALTH_CHECK")
        for fwdr in self.FWD_SCBD.return_forwarders_list():
            self._base_publisher.publish_message(
                self.FWD_SCBD.get_routing_key(fwdr), msg)

        return ack_id

    def divide_work(self, fwdrs_list, ccd_list):
        """Split ccd_list across the given forwarders.

        :param fwdrs_list: forwarder names.
        :param ccd_list: CCD ids to distribute.
        :return: dict with parallel 'FORWARDER_LIST' and 'CCD_LIST' entries;
                 CCD_LIST[i] is the list of CCDs assigned to FORWARDER_LIST[i].
        """
        num_fwdrs = len(fwdrs_list)
        num_ccds = len(ccd_list)

        schedule = {'FORWARDER_LIST': [], 'CCD_LIST': []}
        FORWARDER_LIST = []
        CCD_LIST = []  # A list of ccd lists; indexes parallel FORWARDER_LIST

        if num_fwdrs == 1:
            # BUG FIX: this key was misspelled 'FORWARDERS_LIST', so schedule
            # consumers saw an empty 'FORWARDER_LIST' in the single-forwarder case.
            FORWARDER_LIST.append(fwdrs_list[0])
            CCD_LIST.append(ccd_list)
            schedule['FORWARDER_LIST'] = FORWARDER_LIST
            schedule['CCD_LIST'] = CCD_LIST
            return schedule

        if num_ccds <= num_fwdrs:
            # One CCD per forwarder; surplus forwarders receive no work.
            for k in range(0, num_ccds):
                FORWARDER_LIST.append(fwdrs_list[k])
                CCD_LIST.append([ccd_list[k]])
            schedule['FORWARDER_LIST'] = FORWARDER_LIST
            schedule['CCD_LIST'] = CCD_LIST
        else:
            ccds_per_fwdr = len(ccd_list) // num_fwdrs
            remainder_ccds = len(ccd_list) % num_fwdrs
            offset = 0
            for i in range(0, num_fwdrs):
                tmp_list = []
                for j in range(offset, ccds_per_fwdr + offset):
                    if j >= num_ccds:
                        break
                    tmp_list.append(ccd_list[j])
                offset = offset + ccds_per_fwdr
                # The first forwarder absorbs any remainder CCDs.
                if remainder_ccds != 0 and i == 0:
                    for k in range(offset, offset + remainder_ccds):
                        tmp_list.append(ccd_list[k])
                    offset = offset + remainder_ccds
                FORWARDER_LIST.append(fwdrs_list[i])
                CCD_LIST.append(list(tmp_list))
            schedule['FORWARDER_LIST'] = FORWARDER_LIST
            schedule['CCD_LIST'] = CCD_LIST

        return schedule

    def ncsa_resources_query(self, params, work_schedule):
        """Ask the NCSA foreman to pair distributors with the scheduled forwarders.

        :return: the timed ack id NCSA replies with.
        """
        job_num = str(params[JOB_NUM])
        timed_ack_id = self.get_next_timed_ack_id("NCSA_START_INTEGRATION_ACK")

        ncsa_params = {
            MSG_TYPE: "NCSA_START_INTEGRATION",
            JOB_NUM: job_num,
            'VISIT_ID': params['VISIT_ID'],
            'IMAGE_ID': params['IMAGE_ID'],
            'SESSION_ID': params['SESSION_ID'],
            'REPLY_QUEUE': self.PP_FOREMAN_ACK_PUBLISH,
            ACK_ID: timed_ack_id,
            "FORWARDERS": work_schedule,
        }
        self.JOB_SCBD.set_value_for_job(job_num, "STATE",
                                        "NCSA_START_INT_SENT")
        self._ncsa_publisher.publish_message(self.NCSA_CONSUME, ncsa_params)
        LOGGER.info(
            'The following forwarders schedule has been sent to NCSA for pairing:'
        )
        LOGGER.info(work_schedule)
        return timed_ack_id

    def distribute_job_params(self, params, pairs):
        """Send per-forwarder transfer params for a job.

        :param params: the originating start-integration message.
        :param pairs: list of dicts, each with FORWARDER, CCD_LIST and
                      DISTRIBUTOR keys (see messages.yaml, 'PAIR' key).
        :return: the timed ack id forwarders reply with.
        """
        #ncsa has enough resources...
        job_num = str(params[JOB_NUM])
        self.JOB_SCBD.set_pairs_for_job(job_num, pairs)
        LOGGER.info('The following pairs will be used for Job #%s: %s',
                    job_num, pairs)
        fwd_ack_id = self.get_next_timed_ack_id("FWD_PARAMS_ACK")
        fwd_params = {}
        fwd_params[MSG_TYPE] = "PP_FWDR_XFER_PARAMS"
        fwd_params[JOB_NUM] = job_num
        fwd_params['IMAGE_ID'] = params['IMAGE_ID']
        fwd_params['VISIT_ID'] = params['VISIT_ID']
        fwd_params['REPLY_QUEUE'] = self.PP_FOREMAN_ACK_PUBLISH
        fwd_params[ACK_ID] = fwd_ack_id
        fwd_params['XFER_PARAMS'] = {}
        # The same message dict is reused for each forwarder, with its own
        # CCD_LIST/DISTRIBUTOR spliced in just before publishing.
        for pair in pairs:
            fwd_params['XFER_PARAMS']['CCD_LIST'] = pair['CCD_LIST']
            fwd_params['XFER_PARAMS']['DISTRIBUTOR'] = pair['DISTRIBUTOR']
            route_key = self.FWD_SCBD.get_value_for_forwarder(
                pair['FORWARDER'], "CONSUME_QUEUE")
            self._base_publisher.publish_message(route_key, fwd_params)

        return fwd_ack_id

    def accept_job(self, ack_id, job_num):
        """Record job acceptance and notify the DMCS; always returns True."""
        dmcs_message = {
            JOB_NUM: job_num,
            MSG_TYPE: self.PP_START_INTEGRATION_ACK,
            'COMPONENT': self.COMPONENT_NAME,
            ACK_BOOL: True,
            'ACK_ID': ack_id,
        }
        self.JOB_SCBD.set_value_for_job(job_num, STATE, "JOB_ACCEPTED")
        self.JOB_SCBD.set_value_for_job(job_num, "TIME_JOB_ACCEPTED",
                                        get_timestamp())
        self._base_publisher.publish_message("dmcs_ack_consume", dmcs_message)
        return True

    def process_dmcs_readout(self, params):
        """Handle a PP_READOUT request from the DMCS.

        Sequence: tell NCSA to read out; on a positive NCSA ack, instruct
        every paired forwarder to read out; finally ack the DMCS with the
        aggregate outcome (ACK_BOOL False when NCSA declines or is silent).
        """
        job_number = params[JOB_NUM]
        pairs = self.JOB_SCBD.get_pairs_for_job(job_number)

        ### Send READOUT to NCSA with ACK_ID
        ack_id = self.get_next_timed_ack_id('NCSA_READOUT_ACK')
        ncsa_params = {}
        ncsa_params[MSG_TYPE] = 'NCSA_READOUT'
        ncsa_params['JOB_NUM'] = job_number
        ncsa_params['VISIT_ID'] = params['VISIT_ID']
        ncsa_params['SESSION_ID'] = params['SESSION_ID']
        ncsa_params['IMAGE_ID'] = params['IMAGE_ID']
        ncsa_params['REPLY_QUEUE'] = 'pp_foreman_ack_publish'
        ncsa_params[ACK_ID] = ack_id
        self._ncsa_publisher.publish_message(self.NCSA_CONSUME, ncsa_params)

        # Wait up to 3 seconds for NCSA's single reply.
        ncsa_response = self.progressive_ack_timer(ack_id, 1, 3.0)

        if ncsa_response:
            if ncsa_response['NCSA_FOREMAN']['ACK_BOOL'] == True:
                # NCSA is ready: tell each paired forwarder to start readout.
                fwd_ack_id = self.get_next_timed_ack_id('PP_FWDR_READOUT_ACK')
                len_pairs = len(pairs)
                for i in range(0, len_pairs):
                    forwarder = pairs[i]['FORWARDER']
                    routing_key = self.FWD_SCBD.get_routing_key(forwarder)
                    msg_params = {}
                    msg_params[MSG_TYPE] = 'PP_FWDR_READOUT'
                    msg_params[JOB_NUM] = job_number
                    msg_params['REPLY_QUEUE'] = 'pp_foreman_ack_publish'
                    msg_params['ACK_ID'] = fwd_ack_id
                    self.FWD_SCBD.set_forwarder_state(forwarder,
                                                      'START_READOUT')
                    self._base_publisher.publish_message(
                        routing_key, msg_params)

                forwarder_responses = self.progressive_ack_timer(
                    fwd_ack_id, len_pairs, 4.0)

                if forwarder_responses:
                    dmcs_params = {}
                    dmcs_params[MSG_TYPE] = 'PP_READOUT_ACK'
                    dmcs_params[JOB_NUM] = job_number
                    dmcs_params['COMPONENT'] = self.COMPONENT_NAME
                    dmcs_params['ACK_BOOL'] = True
                    dmcs_params['ACK_ID'] = params['ACK_ID']
                    self._base_publisher.publish_message(
                        params['REPLY_QUEUE'], dmcs_params)
                # NOTE(review): when forwarders never reply, no ack at all is
                # sent to the DMCS — presumably the DMCS times out; confirm.

            else:
                # NCSA declined the readout: negative ack to the DMCS.
                dmcs_params = {}
                dmcs_params[MSG_TYPE] = 'PP_READOUT_ACK'
                dmcs_params[JOB_NUM] = job_number
                dmcs_params['COMPONENT'] = self.COMPONENT_NAME
                dmcs_params['ACK_BOOL'] = False
                dmcs_params['ACK_ID'] = params['ACK_ID']
                # NOTE(review): this branch publishes to the fixed
                # 'dmcs_ack_consume' queue while the other branches use
                # params['REPLY_QUEUE'] — confirm the inconsistency is intended.
                self._base_publisher.publish_message('dmcs_ack_consume',
                                                     dmcs_params)

        else:
            # No response from NCSA at all: negative ack to the DMCS.
            dmcs_params = {}
            dmcs_params[MSG_TYPE] = 'PP_READOUT_ACK'
            dmcs_params[JOB_NUM] = job_number
            dmcs_params['COMPONENT'] = self.COMPONENT_NAME
            dmcs_params['ACK_BOOL'] = False
            dmcs_params['ACK_ID'] = params['ACK_ID']
            self._base_publisher.publish_message(params['REPLY_QUEUE'],
                                                 dmcs_params)

    def process_ack(self, params):
        """Record a timed ack response on the ACK scoreboard."""
        self.ACK_SCBD.add_timed_ack(params)

    def get_next_timed_ack_id(self, ack_type):
        self._next_timed_ack_id = self._next_timed_ack_id + 1
        return (ack_type + "_" + str(self._next_timed_ack_id).zfill(6))

    def ack_timer(self, seconds):
        """Block for `seconds` (fractional allowed), then return True."""
        sleep(seconds)
        return True

    def progressive_ack_timer(self, ack_id, expected_replies, seconds):
        """Poll the ACK scoreboard until all expected replies arrive or time runs out.

        Checks every half second for up to ``seconds`` seconds, then makes one
        final check after the deadline.

        :return: the dict of responding components, or None on timeout.
        """
        counter = 0.0
        while counter < seconds:
            counter = counter + 0.5
            sleep(0.5)
            response = self.ACK_SCBD.get_components_for_timed_ack(ack_id)
            # Identity comparison with None (was '== None').
            if response is None:
                continue
            if len(list(response.keys())) == expected_replies:
                return response

        ## Try one final time
        response = self.ACK_SCBD.get_components_for_timed_ack(ack_id)
        if response is None:
            return None
        elif len(list(response.keys())) == expected_replies:
            return response
        else:
            return None

    def set_pending_nonblock_acks(self, acks, wait_time):
        """Publish a PENDING_ACK for each ack id, expiring wait_time seconds from now."""
        expiry_time = self.add_seconds(datetime.datetime.now().time(),
                                       wait_time)
        ack_msg = {MSG_TYPE: 'PENDING_ACK', 'EXPIRY_TIME': expiry_time}
        # The same message dict is reused with each ack id in turn.
        for ack in acks:
            ack_msg[ACK_ID] = ack
            self._base_publisher.publish_message(self.PP_FOREMAN_ACK_PUBLISH,
                                                 ack_msg)

    def process_pending_ack(self, params):
        """Record a pending non-blocking ack on the ACK scoreboard."""
        self.ACK_SCBD.add_pending_nonblock_ack(params)

    def add_seconds(self, intime, secs):
        """Return `intime` (a datetime.time) advanced by `secs` seconds.

        Wraps around midnight; the date component is a throwaway anchor.
        """
        anchor = datetime.datetime(100, 1, 1, intime.hour, intime.minute,
                                   intime.second)
        return (anchor + datetime.timedelta(seconds=secs)).time()

    def extract_config_values(self):
        """Load the YAML config file and cache broker/scoreboard settings.

        Raises L1ConfigIOError when the file cannot be read and
        L1ConfigKeyError when a required key is missing.
        """
        LOGGER.info('Reading YAML Config file %s' % self._config_file)
        try:
            cdm = toolsmod.intake_yaml_file(self._config_file)
        except IOError as e:
            LOGGER.critical("Unable to find CFG Yaml file %s\n" %
                            self._config_file)
            print("Unable to find CFG Yaml file %s\n" % self._config_file)
            # BUG FIX: IOError has no .arg attribute; format the exception.
            raise L1ConfigIOError("Trouble opening CFG Yaml file %s: %s" %
                                  (self._config_file, e))

        try:
            self._sub_name = cdm[ROOT][
                PFM_BROKER_NAME]  # Message broker user & passwd
            self._sub_passwd = cdm[ROOT][PFM_BROKER_PASSWD]
            self._pub_name = cdm[ROOT][
                'PFM_BROKER_PUB_NAME']  # Message broker user & passwd
            self._pub_passwd = cdm[ROOT]['PFM_BROKER_PUB_PASSWD']
            self._sub_ncsa_name = cdm[ROOT]['PFM_NCSA_BROKER_NAME']
            self._sub_ncsa_passwd = cdm[ROOT]['PFM_NCSA_BROKER_PASSWD']
            self._pub_ncsa_name = cdm[ROOT]['PFM_NCSA_BROKER_PUB_NAME']
            self._pub_ncsa_passwd = cdm[ROOT]['PFM_NCSA_BROKER_PUB_PASSWD']
            self._base_broker_addr = cdm[ROOT][BASE_BROKER_ADDR]
            self._ncsa_broker_addr = cdm[ROOT][NCSA_BROKER_ADDR]
            self._forwarder_dict = cdm[ROOT][XFER_COMPONENTS]['PP_FORWARDERS']
            self._scbd_dict = cdm[ROOT]['SCOREBOARDS']
            self.DMCS_FAULT_QUEUE = cdm[ROOT]['DMCS_FAULT_QUEUE']
            self._policy_max_ccds_per_fwdr = int(
                cdm[ROOT]['POLICY']['MAX_CCDS_PER_FWDR'])
        except KeyError as e:
            LOGGER.critical("CDM Dictionary Key error")
            LOGGER.critical("Offending Key is %s", str(e))
            LOGGER.critical("Bailing out...")
            print("KeyError when reading CFG file. Check logs...exiting...")
            # BUG FIX: KeyError has no .arg attribute; format the exception.
            raise L1ConfigKeyError("Key Error when reading config file: %s" %
                                   e)

        # Message formats default to YAML unless overridden in the config.
        self._base_msg_format = 'YAML'
        self._ncsa_msg_format = 'YAML'

        if 'BASE_MSG_FORMAT' in cdm[ROOT]:
            self._base_msg_format = cdm[ROOT][BASE_MSG_FORMAT]

        if 'NCSA_MSG_FORMAT' in cdm[ROOT]:
            self._ncsa_msg_format = cdm[ROOT][NCSA_MSG_FORMAT]

    def setup_consumer_threads(self):
        """Start one consumer thread per queue via the ThreadManager.

        Raises L1ConsumerError for threading problems and L1Error for any
        other failure (e.g. broker connectivity).
        """
        LOGGER.info('Building _base_broker_url')
        base_broker_url = "amqp://" + self._sub_name + ":" + \
                                            self._sub_passwd + "@" + \
                                            str(self._base_broker_addr)

        ncsa_broker_url = "amqp://" + self._sub_ncsa_name + ":" + \
                                            self._sub_ncsa_passwd + "@" + \
                                            str(self._ncsa_broker_addr)

        self.shutdown_event = threading.Event()
        self.shutdown_event.clear()

        # Set up kwargs that describe consumers to be started.
        # The PP foreman device needs three message consumers.
        kws = {}
        md = {}
        md['amqp_url'] = base_broker_url
        md['name'] = 'Thread-pp_foreman_consume'
        md['queue'] = 'pp_foreman_consume'
        md['callback'] = self.on_dmcs_message
        md['format'] = "YAML"
        md['test_val'] = None
        kws[md['name']] = md

        md = {}
        md['amqp_url'] = base_broker_url
        md['name'] = 'Thread-pp_foreman_ack_publish'
        md['queue'] = 'pp_foreman_ack_publish'
        md['callback'] = self.on_ack_message
        md['format'] = "YAML"
        md['test_val'] = 'test_it'
        kws[md['name']] = md

        md = {}
        md['amqp_url'] = ncsa_broker_url
        md['name'] = 'Thread-ncsa_publish'
        md['queue'] = 'ncsa_publish'
        md['callback'] = self.on_ncsa_message
        md['format'] = "YAML"
        md['test_val'] = 'test_it'
        kws[md['name']] = md

        try:
            self.thread_manager = ThreadManager('thread-manager', kws,
                                                self.shutdown_event)
            self.thread_manager.start()
        except ThreadError as e:
            # BUG FIX: exceptions have no .arg attribute; format the exception.
            LOGGER.error(
                "PP_Device unable to launch Consumers - Thread Error: %s" % e)
            print("PP_Device unable to launch Consumers - Thread Error: %s" %
                  e)
            raise L1ConsumerError(
                "Thread problem preventing Consumer launch: %s" % e)
        except Exception as e:
            LOGGER.error("PP_Device unable to launch Consumers: %s" % e)
            print("PP_Device unable to launch Consumers: %s" % e)
            raise L1Error(
                "PP_Device unable to launch Consumers - Rabbit Problem?: %s" %
                e)

    def setup_scoreboards(self):
        """Create the Forwarder, Job and Ack scoreboards; exit on any failure.

        BUG FIXES in the handlers: exceptions have no .arg attribute, and the
        class constant is ERROR_CODE_PREFIX (ErrorCodePrefix is undefined).
        """
        try:
            # Create Redis Forwarder table with Forwarder info
            self.FWD_SCBD = ForwarderScoreboard('PP_FWD_SCBD',
                                                self._scbd_dict['PP_FWD_SCBD'],
                                                self._forwarder_dict)
            self.JOB_SCBD = JobScoreboard('PP_JOB_SCBD',
                                          self._scbd_dict['PP_JOB_SCBD'])
            self.ACK_SCBD = AckScoreboard('PP_ACK_SCBD',
                                          self._scbd_dict['PP_ACK_SCBD'])
        except L1RabbitConnectionError as e:
            LOGGER.error(
                "PP_Device unable to complete setup_scoreboards-No Rabbit Connect: %s"
                % e)
            print(
                "PP_Device unable to complete setup_scoreboards - No Rabbit Connection: %s"
                % e)
            sys.exit(self.ERROR_CODE_PREFIX + 11)
        except L1RedisError as e:
            LOGGER.error(
                "PP_Device unable to complete setup_scoreboards - no Redis connect: %s"
                % e)
            print(
                "PP_Device unable to complete setup_scoreboards - no Redis connection: %s"
                % e)
            sys.exit(self.ERROR_CODE_PREFIX + 12)
        except Exception as e:
            LOGGER.error(
                "PP_Device init unable to complete setup_scoreboards: %s" % e)
            print("PP_Device unable to complete setup_scoreboards: %s" % e)
            sys.exit(self.ERROR_CODE_PREFIX + 10)

    def send_fault(self, error_string, error_code, job_num, component_name):
        """Publish a FAULT message to the DMCS fault queue.

        BUG FIX: the method was declared without 'self', so every
        self.send_fault(...) call raised TypeError and the body's reference
        to self._base_publisher could never resolve.
        """
        msg = {}
        msg['MSG_TYPE'] = 'FAULT'
        msg['COMPONENT'] = component_name
        msg['JOB_NUM'] = job_num
        msg['ERROR_CODE'] = str(error_code)
        msg["DESCRIPTION"] = error_string
        self._base_publisher.publish_message(self.DMCS_FAULT_QUEUE, msg)

    def purge_broker(self, queues):
        """Purge each named queue on the /tester vhost via rabbitmqctl.

        NOTE(review): the command runs through a shell with the queue name
        concatenated in; queue names are assumed to come from trusted config.
        """
        for queue in queues:
            os.system("rabbitmqctl -p /tester purge_queue " + queue)

    def shutdown(self):
        """Signal consumer threads to stop, then hard-exit the process."""
        LOGGER.debug("PromptProcessDevice: Shutting down Consumer threads.")
        self.shutdown_event.set()
        LOGGER.debug("Thread Manager shutting down and app exiting...")
        print("\n")
        # os._exit skips atexit/finally handlers; exits with status 0.
        os._exit(0)
Exemple #12
0
	def engine_off(self):
		"""Stop the engine: unsubscribe the event generator and clear the running flag."""
		ThreadManager.unsubscribe(self.eventGenerator)
		self.running = 0
Exemple #13
0
	def engine_on(self):
		"""Start the engine: set the running flag, then create, register and start an EventGenerator."""
		self.running = 1
		#self.eventGenerator = threading.Thread(target=generateEvents, args=(self,))
		self.eventGenerator = EventGenerator(self)
		ThreadManager.subscribe(self.eventGenerator)
		self.eventGenerator.start()
from TriatgeWS import TriatgeWS
import logging
import sys

# LOG_FILENAME = '/var/mail/pysimplesoap.server'
# LOG_FILENAME = 'C:/Users/greusr/Desktop/borrame.txt'

# Allow the deployment home directory to be overridden by the first CLI arg.
if len(sys.argv) > 1:
    Constants.HOME_DIR = sys.argv[1]

# All logging goes to <HOME_DIR>/log/log.log at DEBUG level.
LOG_FILENAME = Constants.HOME_DIR + '/log/log.log'

logging.basicConfig(filename=LOG_FILENAME, level=logging.DEBUG)

print("Init")

# The default dispatcher config is installed as the fallback thread below.
defaultConfig = DefaultDispatcher.defaultDispatcher_init()

threadManager = ThreadManager()
ThreadManager.setDefaultThread(defaultConfig)

#Add your WS here

# Register the Triatge web-service dispatcher with the thread manager.
triatgeConfig = TriatgeWS.triatgeDispatcher_init()
ThreadManager.addThread(triatgeConfig)

#Init all Threads
ThreadManager.startAllThreads()

print("Finish")
Exemple #15
0
    def __init__(self):
        """Create the module and thread managers and mark this object active."""
        self.module_manager = ModuleManager(self)
        self.thread_manager = ThreadManager()

        self.active = True
Exemple #16
0
    def setup_consumer_threads(self):
        """Start one consumer thread per queue via the ThreadManager.

        Raises L1ConsumerError for threading problems and L1Error for any
        other failure (e.g. broker connectivity).
        """
        LOGGER.info('Building _base_broker_url')
        base_broker_url = "amqp://" + self._sub_name + ":" + \
                                            self._sub_passwd + "@" + \
                                            str(self._base_broker_addr)

        ncsa_broker_url = "amqp://" + self._sub_ncsa_name + ":" + \
                                            self._sub_ncsa_passwd + "@" + \
                                            str(self._ncsa_broker_addr)

        self.shutdown_event = threading.Event()
        self.shutdown_event.clear()

        # Set up kwargs that describe consumers to be started.
        # The PP foreman device needs three message consumers.
        kws = {}
        md = {}
        md['amqp_url'] = base_broker_url
        md['name'] = 'Thread-pp_foreman_consume'
        md['queue'] = 'pp_foreman_consume'
        md['callback'] = self.on_dmcs_message
        md['format'] = "YAML"
        md['test_val'] = None
        kws[md['name']] = md

        md = {}
        md['amqp_url'] = base_broker_url
        md['name'] = 'Thread-pp_foreman_ack_publish'
        md['queue'] = 'pp_foreman_ack_publish'
        md['callback'] = self.on_ack_message
        md['format'] = "YAML"
        md['test_val'] = 'test_it'
        kws[md['name']] = md

        md = {}
        md['amqp_url'] = ncsa_broker_url
        md['name'] = 'Thread-ncsa_publish'
        md['queue'] = 'ncsa_publish'
        md['callback'] = self.on_ncsa_message
        md['format'] = "YAML"
        md['test_val'] = 'test_it'
        kws[md['name']] = md

        try:
            self.thread_manager = ThreadManager('thread-manager', kws,
                                                self.shutdown_event)
            self.thread_manager.start()
        except ThreadError as e:
            # BUG FIX: exceptions have no .arg attribute; format the exception.
            LOGGER.error(
                "PP_Device unable to launch Consumers - Thread Error: %s" % e)
            print("PP_Device unable to launch Consumers - Thread Error: %s" %
                  e)
            raise L1ConsumerError(
                "Thread problem preventing Consumer launch: %s" % e)
        except Exception as e:
            LOGGER.error("PP_Device unable to launch Consumers: %s" % e)
            print("PP_Device unable to launch Consumers: %s" % e)
            raise L1Error(
                "PP_Device unable to launch Consumers - Rabbit Problem?: %s" %
                e)
from ThreadManager import ThreadManager


class RandomSource:
    """Toy producer that prints random integers at a fixed cadence."""

    def __init__(self):
        pass

    def printRandom(self, amount, sleep):
        """Print `amount` random numbers in [0, 10], pausing `sleep` seconds between prints."""
        count = 0
        while count < amount:
            value = self.getRandomNumber(0, 10)
            print("[{}]: {}".format(count, value))
            time.sleep(sleep)
            count += 1

    def getRandomNumber(self, min, max):
        """Return a random integer N with min <= N <= max.

        NOTE(review): the parameter names shadow the builtins; kept for
        call-site compatibility.
        """
        return random.randint(min, max)


source = RandomSource()

# getInstance() suggests ThreadManager is a singleton — confirm against its module.
manager = ThreadManager.getInstance()

print("Creating first thread.")
manager.createThread("source1", source.printRandom, 5, 0.2)
# waitOn() presumably blocks until source1 finishes before the others start — verify.
manager.getThread("source1").waitOn()

print("Creating second thread.")
manager.createThread("source2", source.printRandom, 10, 0.2)
print("Creating third thread.")
manager.createThread("source3", source.printRandom, 5, 1)

manager.waitForThreads()
Exemple #18
0
import sys

sys.path.append("/home/pi/MMNE/Threading")
from WorkerThread import WorkerThread
from ThreadManager import ThreadManager
from APListener import APListener
import subprocess
from Queue import Queue
import time

if __name__ == '__main__':
    # Wire a single APListener worker to an in/out queue pair and dump
    # whatever it reports once per second.
    queueIn = Queue()
    queueOut = Queue()
    queueTuple = (queueIn, queueOut)
    ListenerID = "AP_Status"
    qDict = {ListenerID: queueTuple}
    APdict = {ListenerID: APListener}

    threadMgr = ThreadManager(APdict, qDict)

    threadMgr.startThread(ListenerID)

    while True:
        time.sleep(1)
        iwData, arpData = threadMgr.getFrom(ListenerID)
        # BUG FIX: these were Python 2 print statements — a SyntaxError under
        # the Python 3 used by the rest of this file.
        print('iw Data: ')
        print(iwData)
        print('arp Data: ')
        print(arpData)