Example #1
def DrivenAgent(config_path, **kwargs):
    '''Driven harness for deployment of OpenEIS applications in VOLTTRON.'''
    config = utils.load_config(config_path)
    mode = config.get('mode', 'PASSIVE') == 'ACTIVE'
    validation_error = ''
    device = dict((key, config['device'][key])
                  for key in ['campus', 'building'])
    subdevices = []
    conv_map = config.get('conversion_map')
    map_names = {}
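    # Conversion-map keys are lower-cased here; the device point names are
    # lower-cased the same way before being handed to setup_conversion_map.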
    if conv_map:
        for key, value in conv_map.items():
            map_names[key.lower() if isinstance(key, str) else key] = value
    # this implies a sub-device listing
    multiple_dev = isinstance(config['device']['unit'], dict)
    if multiple_dev:
        # Assumption that there will be only one entry in the dictionary.
        units = config['device']['unit'].keys()
        for item in units:
            subdevices.extend(config['device']['unit'][item]['subdevices'])
            # modify the device dict so that unit is now pointing to unit_name
    else:
        # Single device configured as a plain string; wrap it so the code
        # below can treat it uniformly as a list of unit names.
        units = [config['device']['unit']]
    agent_id = config.get('agentid')
    device.update({'unit': units})
    _analysis = deepcopy(device)
    _analysis_name = config.get('device').get('analysis_name', 'analysis_name')
    _analysis.update({'analysis_name': _analysis_name})
    if not agent_id:
        validation_error += 'Invalid agent_id specified in config\n'
    if not device:
        validation_error += 'Invalid device path specified in config\n'
    actuator_id = (
        agent_id + '_' + "{campus}/{building}/{unit}".format(**device)
    )
    application = config.get('application')
    if not application:
        validation_error += 'Invalid application specified in config\n'
    utils.setup_logging()
    _log = logging.getLogger(__name__)
    logging.basicConfig(level=logging.DEBUG,
                        format='%(asctime)s   %(levelname)-8s %(message)s',
                        datefmt='%m-%d-%y %H:%M:%S')
    if validation_error:
        _log.error(validation_error)
        raise ValueError(validation_error)
    config.update(config.get('arguments'))
    converter = ConversionMapper()
    output_file = config.get('output_file')
    base_dev = "devices/{campus}/{building}/".format(**device)
    devices_topic = (
        base_dev + '({})(/.*)?/all$'
        .format('|'.join(re.escape(p) for p in units)))
    klass = _get_class(application)
    # This instance is used to call the application's run method when
    # data comes in on the message bus.  It is constructed here so that
    # each time run is called the application can keep its state.
    app_instance = klass(**config)

    class Agent(PublishMixin, BaseAgent):
        '''Agent listens to message bus device and runs when data is published.
        '''
        def __init__(self, **kwargs):
            super(Agent, self).__init__(**kwargs)
            self._update_event = None
            self._update_event_time = None
            self.keys = None
            # master is where we copy from to get a poppable list of
            # subdevices that should be present before we run the analysis.
            self._master_subdevices = subdevices
            self._needed_subdevices = []
            self._master_devices = units
            self._subdevice_values = {}
            self._needed_devices = []
            self._device_values = {}
            self._initialize_devices()
            self.received_input_datetime = None
            self._kwargs = kwargs
            self.commands = {}
            self.current_point = None
            self.current_key = None
            if output_file is not None:
                with open(output_file, 'w') as writer:
                    writer.close()
            self._header_written = False

        def _initialize_devices(self):
            self._needed_subdevices = deepcopy(self._master_subdevices)
            self._needed_devices = deepcopy(self._master_devices)
            self._subdevice_values = {}
            self._device_values = {}

        def _should_run_now(self):
            # Assumes the unit/all values will have values.
            if not len(self._device_values.keys()) > 0:
                return False
            return not (len(self._needed_subdevices) > 0 or
                        len(self._needed_devices) > 0)

        @matching.match_regex(devices_topic)
        def on_rec_analysis_message(self, topic, headers, message, matched):
            # Do the analysis based upon the data passed (the old code).
            # print self._subdevice_values, self._device_values
            obj = jsonapi.loads(message[0])
            dev_list = topic.split('/')
            device_or_subdevice = dev_list[-2]
            device_id = [dev for dev in self._master_devices
                         if dev == device_or_subdevice]
            subdevice_id = [dev for dev in self._master_subdevices
                            if dev == device_or_subdevice]
            if not device_id and not subdevice_id:
                return
            if isinstance(device_or_subdevice, unicode):
                # Normalize unicode topic segments to a plain ASCII str.
                device_or_subdevice = device_or_subdevice.encode('ascii')

            def agg_subdevice(obj):
                sub_obj = {}
                for key, value in obj.items():
                    sub_key = ''.join([key, '_', device_or_subdevice])
                    sub_obj[sub_key] = value
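                # More than five topic segments means
                # devices/<campus>/<building>/<unit>/<subdevice>/all,
                # i.e. sub-device data; otherwise it is unit-level data.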
                if len(dev_list) > 5:
                    self._subdevice_values.update(sub_obj)
                    self._needed_subdevices.remove(device_or_subdevice)
                else:
                    self._device_values.update(sub_obj)
                    self._needed_devices.remove(device_or_subdevice)
                return
            # If this device or sub-device has already reported in this round,
            # assume a new collection cycle has started and reinitialize.
            if (device_or_subdevice not in self._needed_devices and
                    device_or_subdevice not in self._needed_subdevices):
                _log.error("Warning device values already present, "
                           "reinitializing")
                self._initialize_devices()
            agg_subdevice(obj)
            if self._should_run_now():
                field_names = {}
                self._device_values.update(self._subdevice_values)
                for k, v in self._device_values.items():
                    field_names[k.lower() if isinstance(k, str) else k] = v
                if not converter.initialized and \
                        conv_map is not None:
                    converter.setup_conversion_map(
                        map_names,
                        field_names
                    )
                obj = converter.process_row(field_names)
                results = app_instance.run(datetime.now(),
                                           obj)
                self.received_input_datetime = datetime.utcnow()
                # results = app_instance.run(
                # dateutil.parser.parse(self._subdevice_values['Timestamp'],
                #                       fuzzy=True), self._subdevice_values)
                self._process_results(results)
                self._initialize_devices()
            else:
                needed = deepcopy(self._needed_devices)
                needed.extend(self._needed_subdevices)
                _log.info("Still need {} before running."
                          .format(needed))

        def _process_results(self, results):
            '''Run driven application with converted data and write the app
            results to a file or database.
            '''
            _log.debug('Processing Results!')
            for key, value in results.commands.iteritems():
                _log.debug("COMMAND: {}->{}".format(key, value))
            for value in results.log_messages:
                _log.debug("LOG: {}".format(value))
            for key, value in results.table_output.iteritems():
                _log.debug("TABLE: {}->{}".format(key, value))
            if output_file is not None:
                if len(results.table_output.keys()) > 0:
                    for _, v in results.table_output.items():
                        fname = output_file  # +"-"+k+".csv"
                        for r in v:
                            with open(fname, 'a+') as f:
                                keys = r.keys()
                                fout = csv.DictWriter(f, keys)
                                if not self._header_written:
                                    fout.writeheader()
                                    self._header_written = True
                                # if not header_written:
                                    # fout.writerow(keys)
                                fout.writerow(r)
            # publish to message bus.
            if len(results.table_output.keys()) > 0:
                headers = {
                    headers_mod.CONTENT_TYPE: headers_mod.CONTENT_TYPE.JSON,
                    headers_mod.DATE: str(self.received_input_datetime),
                }

                for _, v in results.table_output.items():
                    for r in v:
                        for key, value in r.iteritems():
                            if isinstance(value, bool):
                                value = int(value)
                            for item in units:
                                _analysis['unit'] = item
                                analysis_topic = topics.ANALYSIS_VALUE(
                                    point=key, **_analysis)
                                self.publish_json(analysis_topic, headers, value)
#                                 mytime = int(time.time())
#                                 content = {
#                                     analysis_topic: {
#                                         "Readings": [[mytime, value]],
#                                         "Units": "TU",
#                                         "data_type": "double"
#                                     }
#                                 }
#                                 self.publish_json(topics.LOGGER_LOG, headers,
#                                                   content)
            if results.commands and mode:
                self.commands = results.commands
                if self.keys is None:
                    self.keys = self.commands.keys()
                self.schedule_task()

        def schedule_task(self):
            '''Schedule access to modify device controls.'''
            _log.debug('Schedule Device Access')
            headers = {
                'type':  'NEW_SCHEDULE',
                'requesterID': agent_id,
                'taskID': actuator_id,
                'priority': 'LOW'
                }
            start = datetime.now()
            end = start + td(seconds=30)
            start = str(start)
            end = str(end)
            self.publish_json(topics.ACTUATOR_SCHEDULE_REQUEST(), headers,
                              [["{campus}/{building}/{unit}".format(**device),
                                start, end]])

        def command_equip(self):
            '''Execute commands on configured device.'''
            self.current_key = self.keys[0]
            value = self.commands[self.current_key]
            headers = {
                'Content-Type': 'text/plain',
                'requesterID': agent_id,
                }
            self.publish(topics.ACTUATOR_SET(point=self.current_key, **device),
                         headers, str(value))

        @matching.match_headers({headers_mod.REQUESTER_ID: agent_id})
        @matching.match_exact(topics.ACTUATOR_SCHEDULE_RESULT())
        def schedule_result(self, topic, headers, message, match):
            '''Actuator response (FAILURE, SUCCESS).'''
            _log.debug('Actuator Response')
            msg = jsonapi.loads(message[0])
            msg = msg['result']
            _log.debug('Schedule Device ACCESS')
            if self.keys:
                if msg == "SUCCESS":
                    self.command_equip()
                elif msg == "FAILURE":
                    _log.debug('Auto-correction of device failed.')

        @matching.match_headers({headers_mod.REQUESTER_ID: agent_id})
        @matching.match_glob(topics.ACTUATOR_VALUE(point='*', **device))
        def on_set_result(self, topic, headers, message, match):
            '''Setting of point on device was successful.'''
            _log.debug('Set Success:  {point} - {value}'
                       .format(point=self.current_key,
                               value=str(self.commands[self.current_key])))
            _log.debug('set_point({}, {})'.
                       format(self.current_key,
                              self.commands[self.current_key]))
            self.keys.remove(self.current_key)
            if self.keys:
                self.command_equip()
            else:
                _log.debug('Done with Commands - Release device lock.')
                headers = {
                    'type': 'CANCEL_SCHEDULE',
                    'requesterID': agent_id,
                    'taskID': actuator_id
                    }
                self.publish_json(topics.ACTUATOR_SCHEDULE_REQUEST(),
                                  headers, {})
                self.keys = None

        @matching.match_headers({headers_mod.REQUESTER_ID: agent_id})
        @matching.match_glob(topics.ACTUATOR_ERROR(point='*', **device))
        def on_set_error(self, topic, headers, message, match):
            '''Setting of point on device failed, log failure message.'''
            _log.debug('Set ERROR')
            msg = jsonapi.loads(message[0])
            msg = msg['type']
            _log.debug('Actuator Error: ({}, {}, {})'.
                       format(msg,
                              self.current_key,
                              self.commands[self.current_key]))
            self.keys.remove(self.current_key)
            if self.keys:
                self.command_equip()
            else:
                headers = {
                    'type':  'CANCEL_SCHEDULE',
                    'requesterID': agent_id,
                    'taskID': actuator_id
                    }
                self.publish_json(topics.ACTUATOR_SCHEDULE_REQUEST(),
                                  headers, {})
                self.keys = None

    Agent.__name__ = 'DrivenLoggerAgent'
    return Agent(**kwargs)
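A minimal configuration sketch for the harness above, written as the Python dict that utils.load_config would hand back. Only keys the factory actually reads are shown, and every value is illustrative rather than taken from a real deployment:

# Hypothetical configuration for illustration only.
EXAMPLE_CONFIG = {
    'agentid': 'driven_example',
    'mode': 'PASSIVE',                 # 'ACTIVE' enables actuation via the ActuatorAgent
    'application': 'some.module.ApplicationClass',  # dotted path resolved by _get_class (assumed)
    'device': {
        'campus': 'campus1',
        'building': 'building1',
        'analysis_name': 'Example_Analysis',
        'unit': {                      # dict form implies a sub-device listing
            'AHU1': {'subdevices': ['VAV1', 'VAV2']}
        }
    },
    'arguments': {},                   # merged into config and passed to the application
    'conversion_map': {},              # optional; consumed by ConversionMapper
    'output_file': 'results.csv'       # optional CSV dump of the table output
}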
Example #2
def driven_agent(config_path, **kwargs):
    """Reads agent configuration and converts it to run driven agent.
    :param kwargs: Any driver specific parameters"""
    config = utils.load_config(config_path)
    arguments = config.get('arguments')
    mode = config.get('mode', 'PASSIVE') == 'ACTIVE'
    multiple_devices = isinstance(config['device']['unit'], dict)
    campus_building_config = config['device']
    analysis_name = campus_building_config.get('analysis_name',
                                               'analysis_name')
    analysis_dict = {'analysis_name': analysis_name}
    arguments.update(analysis_dict)
    agent_id = config.get('agentid', None)
    actuator_id = agent_id if agent_id is not None else analysis_name
    campus_building = dict(
        (key, campus_building_config[key]) for key in ['campus', 'building'])
    analysis = deepcopy(campus_building)
    analysis.update(analysis_dict)
    device_config = config['device']['unit']
    command_devices = device_config.keys()
    device_topic_dict = {}
    device_topic_list = []
    subdevices_list = []
    vip_destination = config.get('vip_destination', None)
    from_file = config.get('from_file')
    for device_name in device_config:
        device_topic = topics.DEVICES_VALUE(
            campus=campus_building.get('campus'),
            building=campus_building.get('building'),
            unit=device_name,
            path='',
            point='all')
        device_topic_dict.update({device_topic: device_name})
        device_topic_list.append(device_name)
        if multiple_devices:
            for subdevice in device_config[device_name]['subdevices']:
                subdevices_list.append(subdevice)
                subdevice_topic = topics.DEVICES_VALUE(
                    campus=campus_building.get('campus'),
                    building=campus_building.get('building'),
                    unit=device_name,
                    path=subdevice,
                    point='all')
                subdevice_name = device_name + "/" + subdevice
                device_topic_dict.update({subdevice_topic: subdevice_name})
                device_topic_list.append(subdevice_name)

    base_actuator_path = topics.RPC_DEVICE_PATH(
        campus=campus_building.get('campus', ''),
        building=campus_building.get('building', ''),
        unit=None,
        path='',
        point=None)
    device_lock_duration = config.get('device_lock_duration', 1.25)
    conversion_map = config.get('conversion_map')
    map_names = {}
    if conversion_map:
        for key, value in conversion_map.items():
            map_names[key.lower() if isinstance(key, str) else key] = value
    application = config.get('application')
    validation_error = ''
    if not application:
        validation_error = 'Invalid application specified in config\n'
    if validation_error:
        _log.error(validation_error)
        raise ValueError(validation_error)
    config.update(config.get('arguments'))
    converter = ConversionMapper()
    output_file_prefix = config.get('output_file')
    #unittype_map = config.get('unittype_map', None)
    #assert unittype_map

    klass = _get_class(application)
    # This instance is used to call the application's run method when
    # data comes in on the message bus.  It is constructed here so that
    # each time run is called the application can keep its state.
    app_instance = klass(**arguments)

    class DrivenAgent(Agent):
        """Agent listens to message bus device and runs when data is published.
        """
        def __init__(self, **kwargs):
            """
            Initializes agent
            :param kwargs: Any driver specific parameters"""

            super(DrivenAgent, self).__init__(**kwargs)

            # master is where we copy from to get a poppable list of
            # subdevices that should be present before we run the analysis.
            self._master_devices = device_topic_list
            self._needed_devices = []
            self._device_values = {}
            self._initialize_devices()
            self.received_input_datetime = None
            self._kwargs = kwargs
            self._header_written = False
            self.file_creation_set = set()
            self.actuation_vip = self.vip.rpc
            if vip_destination:
                self.agent = self.setup_remote_actuation(vip_destination)
                self.actuation_vip = self.agent.vip.rpc

        def _initialize_devices(self):
            self._needed_devices = deepcopy(self._master_devices)
            self._device_values = {}

        def setup_remote_actuation(self, vip_destination):
            event = gevent.event.Event()
            agent = Agent(address=vip_destination)
            gevent.spawn(agent.core.run, event)
            event.wait(timeout=15)
            return agent

        @Core.receiver('onstart')
        def startup(self, sender, **kwargs):
            """
            Starts up the agent and subscribes to device topics
            based on agent configuration.
            :param sender:
            :param kwargs: Any driver specific parameters
            :type sender: str"""
            self._initialize_devices()
            for device_topic in device_topic_dict:
                _log.info('Subscribing to ' + device_topic)
                self.vip.pubsub.subscribe(peer='pubsub',
                                          prefix=device_topic,
                                          callback=self.on_analysis_message)

        def _should_run_now(self):
            """
            Checks if messages from all the devices are received
                before running application
            :returns: True or False based on received messages.
            :rtype: boolean"""
            # Assumes the unit/all values will have values.
            if not len(self._device_values.keys()) > 0:
                return False
            return not len(self._needed_devices) > 0

        def on_analysis_message(self, peer, sender, bus, topic, headers,
                                message):
            """
            Subscribe to device data and assemble data set to pass
                to applications.
            :param peer:
            :param sender: device name
            :param bus:
            :param topic: device path topic
            :param headers: message headers
            :param message: message containing points and values dict
                    from device with point type
            :type peer: str
            :type sender: str
            :type bus: str
            :type topic: str
            :type headers: dict
            :type message: dict"""

            device_data = message[0]
            if isinstance(device_data, list):
                device_data = device_data[0]

            def aggregate_subdevice(device_data):
                tagged_device_data = {}
                device_tag = device_topic_dict[topic]
                if device_tag not in self._needed_devices:
                    return False
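                # Tag each point as '<point>&<device>' so the application can
                # tell which device or sub-device a value came from.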
                for key, value in device_data.items():
                    device_data_tag = '&'.join([key, device_tag])
                    tagged_device_data[device_data_tag] = value
                self._device_values.update(tagged_device_data)
                self._needed_devices.remove(device_tag)
                return True

            device_needed = aggregate_subdevice(device_data)
            if not device_needed:
                _log.error("Warning device values already present, "
                           "reinitializing")
                self._initialize_devices()
            if self._should_run_now():
                field_names = {}
                for key, value in self._device_values.items():
                    new_key = key.lower() if isinstance(key, str) else key
                    field_names[new_key] = value
                if not converter.initialized and conversion_map is not None:
                    converter.setup_conversion_map(map_names, field_names)
                if from_file:
                    _timestamp = parse(headers.get('Date'))
                    self.received_input_datetime = _timestamp
                else:
                    _timestamp = dt.now()
                    self.received_input_datetime = dt.utcnow()

                device_data = converter.process_row(field_names)
                results = app_instance.run(_timestamp, device_data)
                # results = app_instance.run(
                # dateutil.parser.parse(self._subdevice_values['Timestamp'],
                #                       fuzzy=True), self._subdevice_values)
                self._process_results(results)
                self._initialize_devices()
            else:
                _log.info("Still need {} before running.".format(
                    self._needed_devices))

        def _process_results(self, results):
            """
            Runs driven application with converted data. Calls appropriate
                methods to process commands, log and table_data in results.
            :param results: Results object containing commands for devices,
                    log messages and table data.
            :type results: Results object \\volttron.platform.agent.driven
            :returns: Same as results param.
            :rtype: Results object \\volttron.platform.agent.driven"""
            _log.info('Processing Results!')
            actuator_error = True
            if mode:
                if results.devices:
                    actuator_error = self.actuator_request(results.devices)
                elif results.commands:
                    actuator_error = self.actuator_request(command_devices)
                if not actuator_error:
                    results = self.actuator_set(results)
            for value in results.log_messages:
                _log.info("LOG: {}".format(value))
            for key, value in results.table_output.items():
                _log.info("TABLE: {}->{}".format(key, value))
            if output_file_prefix is not None:
                results = self.create_file_output(results)
            if len(results.table_output.keys()):
                results = self.publish_analysis_results(results)
            return results

        def publish_analysis_results(self, results):
            """
            Publish table_data in analysis results to the message bus for
                capture by the data historian.

            :param results: Results object containing commands for devices,
                    log messages and table data.
            :type results: Results object \\volttron.platform.agent.driven
            :returns: Same as results param.
            :rtype: Results object \\volttron.platform.agent.driven"""

            headers = {
                headers_mod.CONTENT_TYPE: headers_mod.CONTENT_TYPE.JSON,
                headers_mod.DATE: str(self.received_input_datetime),
            }
            for app, analysis_table in results.table_output.items():
                try:
                    name_timestamp = app.split('&')
                    _name = name_timestamp[0]
                    timestamp = name_timestamp[1]
                except:
                    _name = app
                    timestamp = str(self.received_input_datetime)
                headers = {
                    headers_mod.CONTENT_TYPE: headers_mod.CONTENT_TYPE.JSON,
                    headers_mod.DATE: timestamp,
                }

                # The keys in this publish should look like the following
                # with the values being a dictionary of points off of these
                # base topics
                #
                # Schedule-Reset ACCx/data/interior_ahu/vav1600e
                # Schedule-Reset ACCx/data/interior_ahu/vav1534
                to_publish = defaultdict(list)
                for entry in analysis_table:
                    for key, value in entry.items():
                        for _device in command_devices:
                            analysis['unit'] = _device
                            analysis_topic = topics.ANALYSIS_VALUE(point=key,
                                                                   **analysis)
                            datatype = 'float'
                            if isinstance(value, int):
                                datatype = 'int'
                            kbase = key[key.rfind('/') + 1:]
                            topic_without_point = (
                                analysis_topic[:analysis_topic.rfind('/')])

                            if not to_publish[topic_without_point]:
                                to_publish[topic_without_point] = [{}, {}]

                            to_publish[topic_without_point][0][kbase] = value
                            to_publish[topic_without_point][1][kbase] = {
                                'tz': 'US/Pacific',
                                'type': datatype,
                                'units': 'float',
                            }

                for equipment, _analysis in to_publish.items():
                    self.vip.pubsub.publish('pubsub', equipment, headers,
                                            _analysis)

                to_publish.clear()
            return results

        def create_file_output(self, results):
            """
            Create results/data files for testing and algorithm validation
            if table data is present in the results.

            :param results: Results object containing commands for devices,
                    log messages and table data.
            :type results: Results object \\volttron.platform.agent.driven
            :returns: Same as results param.
            :rtype: Results object \\volttron.platform.agent.driven"""
            for key, value in results.table_output.items():
                name_timestamp = key.split('&')
                _name = name_timestamp[0]
                timestamp = name_timestamp[1]
                file_name = output_file_prefix + "-" + _name + ".csv"
                if file_name not in self.file_creation_set:
                    self._header_written = False
                self.file_creation_set.update([file_name])
                for row in value:
                    with open(file_name, 'a+') as file_to_write:
                        row.update({'Timestamp': timestamp})
                        _keys = row.keys()
                        file_output = csv.DictWriter(file_to_write, _keys)
                        if not self._header_written:
                            file_output.writeheader()
                            self._header_written = True
                        file_output.writerow(row)
            return results

        def actuator_request(self, command_equip):
            """
            Calls the actuator's request_new_schedule method to get
                    device schedule
            :param command_equip: contains the names of the devices
                that will be scheduled with the ActuatorAgent.
            :type: dict or list
            :returns: Return result from request_new_schedule method
                and True or False for error in scheduling device.
            :rtype: boolean
            :Return Values:

                request_error = True/False

            warning:: Calling without previously scheduling a device and not within
                         the time allotted will raise a LockError"""

            _now = dt.now()
            str_now = _now.strftime(DATE_FORMAT)
            _end = _now + td(minutes=device_lock_duration)
            str_end = _end.strftime(DATE_FORMAT)
            for device in command_equip:
                actuation_device = base_actuator_path(unit=device, point='')
                schedule_request = [[actuation_device, str_now, str_end]]
                try:
                    _log.info('Make Request {} for start {} and end {}'.format(
                        actuation_device, str_now, str_end))
                    result = self.actuation_vip.call(
                        'platform.actuator', 'request_new_schedule',
                        actuator_id, actuation_device, 'HIGH',
                        schedule_request).get(timeout=15)
                except RemoteError as ex:
                    _log.warning(
                        "Failed to schedule device {} (RemoteError): {}".
                        format(device, str(ex)))
                    request_error = True
                    continue
                if result['result'] == 'FAILURE':
                    if result['info'] == 'TASK_ID_ALREADY_EXISTS':
                        _log.info('Task to schedule device already exists ' +
                                  device)
                        request_error = False
                    else:
                        _log.warn('Failed to schedule device (unavailable) ' +
                                  device)
                        request_error = True
                else:
                    request_error = False

            return request_error

        def actuator_set(self, results):
            """
            Calls the actuator's set_point method to set point on device

            :param results: Results object containing commands for devices,
                    log messages and table data.
            :type results: Results object \\volttron.platform.agent.driven"""
            def make_actuator_set(device, point_value_dict):
                for point, new_value in point_value_dict.items():
                    point_path = base_actuator_path(unit=device, point=point)
                    try:
                        _log.info('Set point {} to {}'.format(
                            point_path, new_value))
                        result = self.actuation_vip.call(
                            'platform.actuator', 'set_point', actuator_id,
                            point_path, new_value).get(timeout=15)
                    except RemoteError as ex:
                        _log.warning("Failed to set {} to {}: {}".format(
                            point_path, new_value, str(ex)))
                        continue

            for device, point_value_dict in results.devices.items():
                make_actuator_set(device, point_value_dict)

            for device in command_devices:
                make_actuator_set(device, results.commands)
            return results

    DrivenAgent.__name__ = 'DrivenLoggerAgent'
    return DrivenAgent(**kwargs)
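The VIP-based factory above reads a similar configuration; a hedged sketch with illustrative values, listing only the keys it consumes:

# Hypothetical configuration for illustration only.
EXAMPLE_CONFIG = {
    'agentid': 'driven_vip_example',
    'mode': 'PASSIVE',                  # 'ACTIVE' enables actuation RPC calls
    'application': 'some.module.ApplicationClass',  # resolved by _get_class (assumed)
    'device': {
        'campus': 'campus1',
        'building': 'building1',
        'analysis_name': 'Example_Analysis',
        'unit': {'AHU1': {'subdevices': ['VAV1']}}
    },
    'arguments': {},                    # passed to the application constructor
    'conversion_map': {},               # optional; consumed by ConversionMapper
    'output_file': 'results',           # used here as a file-name prefix
    'from_file': False,                 # True: take the run timestamp from the Date header
    'device_lock_duration': 1.25,       # minutes requested from the ActuatorAgent
    'vip_destination': None             # optional remote VIP address used for actuation
}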
Example #3
def DrivenAgent(config_path, **kwargs):
    '''Driven harness for deployment of OpenEIS applications in VOLTTRON.'''
    config = utils.load_config(config_path)
    mode = config.get('mode', 'PASSIVE') == 'ACTIVE'
    validation_error = ''
    device = dict(
        (key, config['device'][key]) for key in ['campus', 'building', 'unit'])
    agent_id = config.get('agentid')
    if not agent_id:
        validation_error += 'Invalid agent_id specified in config\n'
    if not device:
        validation_error += 'Invalid device path specified in config\n'
    actuator_id = agent_id + '_' + "{campus}/{building}/{unit}".format(
        **device)
    application = config.get('application')
    if not application:
        validation_error += 'Invalid application specified in config\n'
    utils.setup_logging()
    _log = logging.getLogger(__name__)
    logging.basicConfig(level=logging.DEBUG,
                        format='%(asctime)s   %(levelname)-8s %(message)s',
                        datefmt='%m-%d-%y %H:%M:%S')
    if validation_error:
        _log.error(validation_error)
        raise ValueError(validation_error)
    config.update(config.get('arguments'))
    converter = ConversionMapper()
    output_file = config.get('output_file')
    klass = _get_class(application)

    # This instance is used to call the application's run method when
    # data comes in on the message bus.  It is constructed here so that
    # each time run is called the application can keep its state.
    app_instance = klass(**config)

    class Agent(PublishMixin, BaseAgent):
        '''Agent listens to message bus device and runs when data is published.
        '''
        def __init__(self, **kwargs):
            super(Agent, self).__init__(**kwargs)
            self._update_event = None
            self._update_event_time = None
            self.keys = None
            self._device_states = {}
            self._kwargs = kwargs
            self.commands = {}
            self.current_point = None
            self.current_key = None
            self.received_input_datetime = None
            if output_file is not None:
                with open(output_file, 'w') as writer:
                    writer.close()
            self._header_written = False

        @matching.match_exact(topics.DEVICES_VALUE(point='all', **device))
        def on_received_message(self, topic, headers, message, matched):
            '''Subscribe to device data and convert data to correct type for
            the driven application.
            '''
            _log.debug("Message received")
            _log.debug("MESSAGE: " + jsonapi.dumps(message[0]))
            _log.debug("TOPIC: " + topic)
            data = jsonapi.loads(message[0])

            #TODO: grab the time from the header if it's there or use now if not
            self.received_input_datetime = datetime.utcnow()
            results = app_instance.run(self.received_input_datetime, data)
            self._process_results(results)

        def _process_results(self, results):
            '''Run driven application with converted data and write the app
            results to a file or database.
            '''
            _log.debug('Processing Results!')
            for key, value in results.commands.iteritems():
                _log.debug("COMMAND: {}->{}".format(key, value))
            for value in results.log_messages:
                _log.debug("LOG: {}".format(value))
            for key, value in results.table_output.iteritems():
                _log.debug("TABLE: {}->{}".format(key, value))
            # publish to output file if available.
            if output_file is not None:
                if len(results.table_output.keys()) > 0:
                    for _, v in results.table_output.items():
                        fname = output_file  # +"-"+k+".csv"
                        for r in v:
                            with open(fname, 'a+') as f:
                                keys = r.keys()
                                fout = csv.DictWriter(f, keys)
                                if not self._header_written:
                                    fout.writeheader()
                                    self._header_written = True
                                # if not header_written:
                                # fout.writerow(keys)
                                fout.writerow(r)
            # publish to message bus.
            if len(results.table_output.keys()) > 0:
                now = utils.format_timestamp(self.received_input_datetime)
                headers = {
                    headers_mod.CONTENT_TYPE: headers_mod.CONTENT_TYPE.JSON,
                    headers_mod.DATE: now,
                    headers_mod.TIMESTAMP: now
                }

                for _, v in results.table_output.items():
                    for r in v:
                        for key, value in r.iteritems():
                            if isinstance(value, bool):
                                value = int(value)
                            topic = topics.ANALYSIS_VALUE(
                                point=key,
                                **config['device'])  # .replace('{analysis}', key)
                            #print "publishing {}->{}".format(topic, value)
                            self.publish_json(topic, headers, value)

            if results.commands and mode:
                self.commands = results.commands
                if self.keys is None:
                    self.keys = self.commands.keys()
                self.schedule_task()

        def schedule_task(self):
            '''Schedule access to modify device controls.'''
            _log.debug('Schedule Device Access')
            headers = {
                'type': 'NEW_SCHEDULE',
                'requesterID': agent_id,
                'taskID': actuator_id,
                'priority': 'LOW'
            }
            start = datetime.now()
            end = start + td(seconds=300)
            start = str(start)
            end = str(end)
            _log.debug("{campus}/{building}/{unit}".format(**device))
            self.publish_json(
                topics.ACTUATOR_SCHEDULE_REQUEST(), headers,
                [["{campus}/{building}/{unit}".format(**device), start, end]])

        def command_equip(self):
            '''Execute commands on configured device.'''
            self.current_key = self.keys[0]
            value = self.commands[self.current_key]
            headers = {
                'Content-Type': 'text/plain',
                'requesterID': agent_id,
            }
            self.publish(topics.ACTUATOR_SET(point=self.current_key, **device),
                         headers, str(value))

        @matching.match_headers({headers_mod.REQUESTER_ID: agent_id})
        @matching.match_exact(topics.ACTUATOR_SCHEDULE_RESULT())
        def schedule_result(self, topic, headers, message, match):
            '''Actuator response (FAILURE, SUCCESS).'''
            print 'Actuator Response'
            msg = jsonapi.loads(message[0])
            msg = msg['result']
            _log.debug('Schedule Device ACCESS')
            if self.keys:
                if msg == "SUCCESS":
                    self.command_equip()
                elif msg == "FAILURE":
                    print 'auto correction failed'
                    _log.debug('Auto-correction of device failed.')

        @matching.match_headers({headers_mod.REQUESTER_ID: agent_id})
        @matching.match_glob(topics.ACTUATOR_VALUE(point='*', **device))
        def on_set_result(self, topic, headers, message, match):
            '''Setting of point on device was successful.'''
            print('Set Success:  {point} - {value}'.format(
                point=self.current_key,
                value=str(self.commands[self.current_key])))
            _log.debug('set_point({}, {})'.format(
                self.current_key, self.commands[self.current_key]))
            self.keys.remove(self.current_key)
            if self.keys:
                self.command_equip()
            else:
                print 'Done with Commands - Release device lock.'
                headers = {
                    'type': 'CANCEL_SCHEDULE',
                    'requesterID': agent_id,
                    'taskID': actuator_id
                }
                self.publish_json(topics.ACTUATOR_SCHEDULE_REQUEST(), headers,
                                  {})
                self.keys = None

        @matching.match_headers({headers_mod.REQUESTER_ID: agent_id})
        @matching.match_glob(topics.ACTUATOR_ERROR(point='*', **device))
        def on_set_error(self, topic, headers, message, match):
            '''Setting of point on device failed, log failure message.'''
            print 'Set ERROR'
            msg = jsonapi.loads(message[0])
            msg = msg['type']
            _log.debug('Actuator Error: ({}, {}, {})'.format(
                msg, self.current_key, self.commands[self.current_key]))
            self.keys.remove(self.current_key)
            if self.keys:
                self.command_equip()
            else:
                headers = {
                    'type': 'CANCEL_SCHEDULE',
                    'requesterID': agent_id,
                    'taskID': actuator_id
                }
                self.publish_json(topics.ACTUATOR_SCHEDULE_REQUEST(), headers,
                                  {})
                self.keys = None

    Agent.__name__ = agent_id
    return Agent(**kwargs)
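Each of these factories returns a constructed agent rather than a class, so in a VOLTTRON agent module they are usually handed to the platform's launcher helpers. A minimal sketch, assuming the utils.default_main / utils.vip_main helpers from volttron.platform.agent; the description string is illustrative:

import sys

from volttron.platform.agent import utils


def main(argv=sys.argv):
    '''Main entry point called by the agent launcher.'''
    # The PublishMixin/BaseAgent factories (Examples #1 and #3) are typically
    # launched with default_main; the VIP-based driven_agent factories would
    # use utils.vip_main(driven_agent) instead.
    utils.default_main(DrivenAgent,
                       description='Driven application harness',  # illustrative
                       argv=argv)


if __name__ == '__main__':
    try:
        sys.exit(main())
    except KeyboardInterrupt:
        pass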
Example #4
def driven_agent(config_path, **kwargs):
    """
    Reads the agent configuration and uses it to set up the driven agent.
    :param config_path:
    :param kwargs:
    :return:
    """
    config = utils.load_config(config_path)
    arguments = config.get("arguments")

    actuation_mode = config.get("actuation_mode", "PASSIVE") == "ACTIVE"
    actuator_lock_required = config.get("require_actuator_lock", False)

    campus = config["device"].get("campus", "")
    building = config["device"].get("building", "")
    analysis_name = config.get("analysis_name", "analysis_name")
    publish_base = "/".join([analysis_name, campus, building])
    application_name = config.get("pretty_name", analysis_name)
    arguments.update({"analysis_name": analysis_name})

    device_config = config["device"]["unit"]
    multiple_devices = isinstance(device_config, dict)
    command_devices = list(device_config.keys())
    device_topic_dict = {}
    device_topic_list = []
    subdevices_list = []

    interval = config.get("interval", 60)
    vip_destination = config.get("vip_destination", None)
    timezone = config.get("local_timezone", "US/Pacific")

    for device_name in device_config:
        device_topic = topics.DEVICES_VALUE(campus=campus,
                                            building=building,
                                            unit=device_name,
                                            path="",
                                            point="all")

        device_topic_dict.update({device_topic: device_name})
        device_topic_list.append(device_name)
        if multiple_devices:
            for subdevice in device_config[device_name]["subdevices"]:
                subdevices_list.append(subdevice)
                subdevice_topic = topics.DEVICES_VALUE(campus=campus,
                                                       building=building,
                                                       unit=device_name,
                                                       path=subdevice,
                                                       point="all")

                subdevice_name = device_name + "/" + subdevice
                device_topic_dict.update({subdevice_topic: subdevice_name})
                device_topic_list.append(subdevice_name)

    base_actuator_path = topics.RPC_DEVICE_PATH(campus=campus,
                                                building=building,
                                                unit=None,
                                                path="",
                                                point=None)

    device_lock_duration = config.get("device_lock_duration", 10.0)
    conversion_map = config.get("conversion_map")
    missing_data_threshold = config.get("missing_data_threshold", 15.0) / 100.0
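    # missing_data_threshold is configured as a percentage (default 15.0) and
    # converted to a fraction for the missing-device check in on_analysis_message.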
    map_names = {}
    if conversion_map:
        for key, value in conversion_map.items():
            map_names[key.lower() if isinstance(key, str) else key] = value

    application = config.get("application")
    validation_error = ""
    if not application:
        validation_error = "Invalid application specified in config\n"
    if validation_error:
        _log.error(validation_error)
        raise ValueError(validation_error)

    converter = ConversionMapper()
    # output_file_prefix = config.get("output_file")

    klass = _get_class(application)
    # This instance is used to call the application's run method when
    # data comes in on the message bus.  It is constructed here so that
    # each time run is called the application can keep its state.
    # points = arguments.pop("point_mapping")
    app_instance = klass(**arguments)

    class DrivenAgent(Agent):
        """Agent listens to message bus device and runs when data is published.
        """
        def __init__(self, **kwargs):
            """
            Initializes agent
            :param kwargs: Any driver specific parameters"""

            super(DrivenAgent, self).__init__(**kwargs)

            # master is where we copy from to get a poppable list of
            # subdevices that should be present before we run the analysis.
            self.master_devices = device_topic_list
            self.needed_devices = []
            self.device_values = {}
            self.initialize_devices()
            self.received_input_datetime = None

            self._header_written = False
            self.file_creation_set = set()

            self.actuation_vip = self.vip.rpc
            self.initialize_time = None
            if vip_destination:
                self.agent = setup_remote_actuation(vip_destination)
                self.actuation_vip = self.agent.vip.rpc

        def initialize_devices(self):
            self.needed_devices = self.master_devices[:]
            self.device_values = {}

        @Core.receiver("onstart")
        def startup(self, sender, **kwargs):
            """
            Starts up the agent and subscribes to device topics
            based on agent configuration.
            :param sender:
            :param kwargs: Any driver specific parameters
            :type sender: str
            """
            for device in device_topic_dict:
                _log.info("Subscribing to " + device)
                self.vip.pubsub.subscribe(peer="pubsub",
                                          prefix=device,
                                          callback=self.on_analysis_message)

        def _should_run_now(self):
            """
            Checks if messages from all the devices are received
                before running application
            :returns: True or False based on received messages.
            :rtype: boolean
            """
            # Assumes the unit/all values will have values.
            if not self.device_values.keys():
                return False
            return not self.needed_devices

        def aggregate_subdevice(self, device_data, topic):
            """
            Aggregates device and subdevice data for application
            :returns: True or False based on if device data is needed.
            :rtype: boolean"""
            tagged_device_data = {}
            device_tag = device_topic_dict[topic]
            _log.debug("Current device to aggregate: {}".format(device_tag))
            if device_tag not in self.needed_devices:
                return False
            for key, value in device_data.items():
                device_data_tag = "&".join([key, device_tag])
                tagged_device_data[device_data_tag] = value
            self.device_values.update(tagged_device_data)
            self.needed_devices.remove(device_tag)
            return True

        def on_analysis_message(self, peer, sender, bus, topic, headers,
                                message):
            """
            Subscribe to device data and assemble data set to pass
                to applications.
            :param peer:
            :param sender: device name
            :param bus:
            :param topic: device path topic
            :param headers: message headers
            :param message: message containing points and values dict
                    from device with point type
            :type peer: str
            :type sender: str
            :type bus: str
            :type topic: str
            :type headers: dict
            :type message: dict
            """
            timestamp = parse(headers.get("Date"))
            missing_but_running = False
            if self.initialize_time is None and len(self.master_devices) > 1:
                self.initialize_time = find_reinitialize_time(timestamp)

            if self.initialize_time is not None and timestamp < self.initialize_time:
                if len(self.master_devices) > 1:
                    return

            to_zone = dateutil.tz.gettz(timezone)
            timestamp = timestamp.astimezone(to_zone)
            self.received_input_datetime = timestamp
            _log.debug("Current time of publish: {}".format(timestamp))

            device_data = message[0]
            if isinstance(device_data, list):
                device_data = device_data[0]

            device_needed = self.aggregate_subdevice(device_data, topic)
            if not device_needed:
                fraction_missing = float(len(self.needed_devices)) / len(
                    self.master_devices)
                if fraction_missing > missing_data_threshold:
                    _log.error(
                        "Device values already present, reinitializing at publish: {}"
                        .format(timestamp))
                    self.initialize_devices()
                    device_needed = self.aggregate_subdevice(
                        device_data, topic)
                    return
                missing_but_running = True
                _log.warning(
                    "Device already present. Using available data for diagnostic.: {}"
                    .format(timestamp))
                _log.warning(
                    "Device already present - topic: {}".format(topic))
                _log.warning("All devices: {}".format(self.master_devices))
                _log.warning("Needed devices: {}".format(self.needed_devices))

            if self._should_run_now() or missing_but_running:
                field_names = {}
                for point, data in self.device_values.items():
                    field_names[point] = data
                if not converter.initialized and conversion_map is not None:
                    converter.setup_conversion_map(map_names, field_names)

                device_data = converter.process_row(field_names)
                results = app_instance.run(timestamp, device_data)
                self.process_results(results)
                self.initialize_devices()
                if missing_but_running:
                    device_needed = self.aggregate_subdevice(
                        device_data, topic)
            else:
                _log.info("Still need {} before running.".format(
                    self.needed_devices))

        def process_results(self, results):
            """
            Runs driven application with converted data. Calls appropriate
                methods to process commands, log and table_data in results.
            :param results: Results object containing commands for devices,
                    log messages and table data.
            :type results: Results object \\volttron.platform.agent.driven
            :returns: Same as results param.
            :rtype: Results object \\volttron.platform.agent.driven
            """
            _log.info("Processing Results!")
            actuator_error = False
            if actuation_mode:
                if results.devices and actuator_lock_required:
                    actuator_error = self.actuator_request(results.devices)
                elif results.commands and actuator_lock_required:
                    actuator_error = self.actuator_request(command_devices)
                if not actuator_error:
                    results = self.actuator_set(results)
            for log in results.log_messages:
                _log.info("LOG: {}".format(log))
            for key, value in results.table_output.items():
                _log.info("TABLE: {}->{}".format(key, value))
            #if output_file_prefix is not None:
            #   results = self.create_file_output(results)
            if len(results.table_output.keys()):
                results = self.publish_analysis_results(results)
            return results

        def publish_analysis_results(self, results):
            """
            Publish table_data in analysis results to the message bus for
                capture by the data historian.

            :param results: Results object containing commands for devices,
                    log messages and table data.
            :type results: Results object \\volttron.platform.agent.driven
            :returns: Same as results param.
            :rtype: Results object \\volttron.platform.agent.driven
            """
            to_publish = defaultdict(dict)
            for app, analysis_table in list(results.table_output.items()):
                try:
                    name_timestamp = app.split("&")
                    timestamp = name_timestamp[1]
                except:
                    timestamp = self.received_input_datetime
                    timestamp = format_timestamp(timestamp)

                headers = {
                    headers_mod.CONTENT_TYPE: headers_mod.CONTENT_TYPE.JSON,
                    headers_mod.DATE: timestamp,
                }
                for entry in analysis_table:
                    for point, result in list(entry.items()):
                        for device in command_devices:
                            publish_topic = "/".join(
                                [publish_base, device, point])
                            analysis_topic = topics.RECORD(
                                subtopic=publish_topic)
                            datatype = str(type(result))
                            to_publish[analysis_topic] = result

                for result_topic, result in to_publish.items():
                    self.vip.pubsub.publish("pubsub", result_topic, headers,
                                            result)
                to_publish.clear()
            return results

        def create_file_output(self, results):
            """
            Create results/data files for testing and algorithm validation
            if table data is present in the results.

            :param results: Results object containing commands for devices,
                    log messages and table data.
            :type results: Results object \\volttron.platform.agent.driven
            :returns: Same as results param.
            :rtype: Results object \\volttron.platform.agent.driven"""
            tag = 0
            for key, value in list(results.table_output.items()):
                for row in value:
                    name_timestamp = key.split("&")
                    _name = name_timestamp[0]
                    timestamp = name_timestamp[1]
                    file_name = _name + str(tag) + ".csv"
                    tag += 1
                    if file_name not in self.file_creation_set:
                        self._header_written = False
                    self.file_creation_set.update([file_name])
                    with open(file_name, "a+") as file_to_write:
                        row.update({"Timestamp": timestamp})
                        _keys = list(row.keys())
                        file_output = csv.DictWriter(file_to_write, _keys)
                        if not self._header_written:
                            file_output.writeheader()
                            self._header_written = True
                        file_output.writerow(row)
            return results

        def actuator_request(self, command_equip):
            """
            Calls the actuator"s request_new_schedule method to get
                    device schedule
            :param command_equip: contains the names of the devices
                that will be scheduled with the ActuatorAgent.
            :type command_equip: dict or list
            :returns: True if an error occurred while scheduling the device,
                otherwise False.
            :rtype: boolean
            :Return Values:

                request_error = True/False

            .. warning:: Calling without previously scheduling a device, or outside
                         the allotted time, will raise a LockError."""

            _now = get_aware_utc_now()
            str_now = format_timestamp(_now)
            _end = _now + td(minutes=device_lock_duration)
            str_end = format_timestamp(_end)
            for device in command_equip:
                actuation_device = base_actuator_path(unit=device, point="")
                schedule_request = [[actuation_device, str_now, str_end]]
                try:
                    _log.info("Make Request {} for start {} and end {}".format(
                        actuation_device, str_now, str_end))
                    result = self.actuation_vip.call(
                        "platform.actuator", "request_new_schedule", "rcx",
                        actuation_device, "HIGH",
                        schedule_request).get(timeout=15)
                except RemoteError as ex:
                    _log.warning(
                        "Failed to schedule device {} (RemoteError): {}".
                        format(device, str(ex)))
                    request_error = True
                    continue
                if result["result"] == "FAILURE":
                    if result["info"] == "TASK_ID_ALREADY_EXISTS":
                        _log.info("Task to schedule device already exists " +
                                  device)
                        request_error = False
                    else:
                        _log.warning(
                            "Failed to schedule device (unavailable) " +
                            device)
                        request_error = True
                else:
                    request_error = False

            return request_error

        def actuator_set(self, results):
            """
            Calls the actuator"s set_point method to set point on device

            :param results: Results object containing commands for devices,
                    log messages and table data.
            :type results: Results object \\volttron.platform.agent.driven"""
            def make_actuator_set(device, point_value_dict):
                for point, new_value in point_value_dict.items():
                    point_path = base_actuator_path(unit=device, point=point)
                    try:
                        _log.info("Set point {} to {}".format(
                            point_path, new_value))
                        result = self.actuation_vip.call(
                            "platform.actuator", "set_point", "rcx",
                            point_path, new_value).get(timeout=15)
                    except RemoteError as ex:
                        _log.warning("Failed to set {} to {}: {}".format(
                            point_path, new_value, str(ex)))
                        continue

            for device, point_value_dict in results.devices.items():
                make_actuator_set(device, point_value_dict)

            for device in command_devices:
                make_actuator_set(device, results.commands)
            return results

    def find_reinitialize_time(current_time):
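        # Round current_time up to the next interval boundary, measured in
        # seconds from local midnight.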
        midnight = current_time.replace(hour=0,
                                        minute=0,
                                        second=0,
                                        microsecond=0)
        seconds_from_midnight = (current_time - midnight).total_seconds()
        offset = seconds_from_midnight % interval
        previous_in_seconds = seconds_from_midnight - offset
        next_in_seconds = previous_in_seconds + interval
        from_midnight = td(seconds=next_in_seconds)
        _log.debug("Start of next scrape interval: {}".format(midnight +
                                                              from_midnight))
        return midnight + from_midnight

    def setup_remote_actuation(vip_destination):
        event = gevent.event.Event()
        agent = Agent(address=vip_destination)
        gevent.spawn(agent.core.run, event)
        event.wait(timeout=15)
        return agent

    DrivenAgent.__name__ = "DrivenLoggerAgent"
    return DrivenAgent(**kwargs)
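
The find_reinitialize_time helper above rounds a given time up to the next interval
boundary counted from local midnight. A minimal standalone sketch of that arithmetic
(the 60-second interval and the sample timestamp are illustrative, not taken from any
agent config):

from datetime import datetime, timedelta

def next_interval_boundary(current_time, interval=60):
    # Seconds elapsed since local midnight.
    midnight = current_time.replace(hour=0, minute=0, second=0, microsecond=0)
    seconds_from_midnight = (current_time - midnight).total_seconds()
    # Drop the partial interval, then step forward one full interval.
    next_in_seconds = seconds_from_midnight - (seconds_from_midnight % interval) + interval
    return midnight + timedelta(seconds=next_in_seconds)

# Example: 12:00:45 with a 60-second interval rounds up to 12:01:00.
print(next_interval_boundary(datetime(2020, 1, 1, 12, 0, 45)))
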
def driven_agent(config_path, **kwargs):
    """Driven harness for deployment of OpenEIS applications in VOLTTRON."""
    config = utils.load_config(config_path)
    arguments = config.get('arguments')
    mode = True if config.get('mode', 'PASSIVE') == 'ACTIVE' else False
    multiple_devices = isinstance(config['device']['unit'], dict)
    campus_building_config = config['device']
    analysis_name = campus_building_config.get('analysis_name',
                                               'analysis_name')
    analysis_dict = {'analysis_name': analysis_name}
    arguments.update(analysis_dict)
    agent_id = config.get('agentid', None)
    agent_id = actuator_id = agent_id if agent_id is not None else analysis_name
    campus_building = dict(
        (key, campus_building_config[key]) for key in ['campus', 'building'])
    analysis = deepcopy(campus_building)
    analysis.update(analysis_dict)
    device_config = config['device']['unit']
    command_devices = device_config.keys()
    device_topic_dict = {}
    device_topic_list = []
    subdevices_list = []
    from_file = config.get('from_file')
    for device_name in device_config:
        device_topic = topics.DEVICES_VALUE(
            campus=campus_building.get('campus'),
            building=campus_building.get('building'),
            unit=device_name,
            path='',
            point='all')
        device_topic_dict.update({device_topic: device_name})
        device_topic_list.append(device_name)
        if multiple_devices:
            for subdevice in device_config[device_name]['subdevices']:
                subdevices_list.append(subdevice)
                subdevice_topic = topics.DEVICES_VALUE(
                    campus=campus_building.get('campus'),
                    building=campus_building.get('building'),
                    unit=device_name,
                    path=subdevice,
                    point='all')
                subdevice_name = device_name + "/" + subdevice
                device_topic_dict.update({subdevice_topic: subdevice_name})
                device_topic_list.append(subdevice_name)

    base_actuator_path = topics.ACTUATOR_WRITE(
        campus=campus_building.get('campus', ''),
        building=campus_building.get('building', ''),
        unit=None,
        path='',
        point=None)
    conversion_map = config.get('conversion_map')
    map_names = {}
    for key, value in conversion_map.items():
        map_names[key.lower() if isinstance(key, str) else key] = value
    application = config.get('application')
    validation_error = ''
    if not application:
        validation_error = 'Invalid application specified in config\n'
    if validation_error:
        _log.error(validation_error)
        raise ValueError(validation_error)
    config.update(config.get('arguments'))
    converter = ConversionMapper()
    output_file_prefix = config.get('output_file')
    #unittype_map = config.get('unittype_map', None)
    #assert unittype_map

    klass = _get_class(application)
    # This instance is used to call the application's run method when
    # data comes in on the message bus.  It is constructed here so that
    # the application can keep its state each time run is called.
    app_instance = klass(**arguments)

    class DrivenAgent(Agent):
        '''Agent listens to message bus device and runs when data is published.
        '''
        def __init__(self, **kwargs):
            super(DrivenAgent, self).__init__(**kwargs)

            # master is where we copy from to get a poppable list of
            # subdevices that should be present before we run the analysis.
            self._master_devices = device_topic_list
            self._needed_devices = []
            self._device_values = {}
            self._initialize_devices()
            self.received_input_datetime = None
            self._kwargs = kwargs

            self._header_written = False
            self.file_creation_set = set()

        def _initialize_devices(self):
            self._needed_devices = deepcopy(self._master_devices)
            self._device_values = {}

        @Core.receiver('onstart')
        def startup(self, sender, **kwargs):
            self._initialize_devices()
            for device_topic in device_topic_dict:
                _log.debug('Subscribing to ' + device_topic)
                self.vip.pubsub.subscribe(peer='pubsub',
                                          prefix=device_topic,
                                          callback=self.on_analysis_message)

        def _should_run_now(self):
            # Assumes the unit/all values will have values.
            if not self._device_values:
                return False
            return not self._needed_devices

        def on_analysis_message(self, peer, sender, bus, topic, headers,
                                message):
            """Subscribe to device data and assemble data set to pass
            to applications.
            """
            device_data = message[0]
            if isinstance(device_data, list):
                device_data = device_data[0]

            def aggregate_subdevice(device_data):
                tagged_device_data = {}
                device_tag = device_topic_dict[topic]
                if device_tag not in self._needed_devices:
                    return False
                for key, value in device_data.items():
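                    # Tag each point as "<point>&<device>" so readings from
                    # multiple devices can share one flat dict.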
                    device_data_tag = '&'.join([key, device_tag])
                    tagged_device_data[device_data_tag] = value
                self._device_values.update(tagged_device_data)
                self._needed_devices.remove(device_tag)
                return True

            device_needed = aggregate_subdevice(device_data)
            if not device_needed:
                _log.error("Warning device values already present, "
                           "reinitializing")

            if self._should_run_now():
                field_names = {}
                for k, v in self._device_values.items():
                    field_names[k.lower() if isinstance(k, str) else k] = v
                if not converter.initialized and conversion_map is not None:
                    converter.setup_conversion_map(map_names, field_names)
                if from_file:
                    _timestamp = parse(headers.get('Date'))
                    self.received_input_datetime = _timestamp
                else:
                    _timestamp = dt.now()
                    self.received_input_datetime = dt.utcnow()

                device_data = converter.process_row(field_names)
                results = app_instance.run(_timestamp, device_data)
                # results = app_instance.run(
                # dateutil.parser.parse(self._subdevice_values['Timestamp'],
                #                       fuzzy=True), self._subdevice_values)
                self._process_results(results)
                self._initialize_devices()
            else:
                _log.info("Still need {} before running.".format(
                    self._needed_devices))

        def _process_results(self, results):
            """Run driven application with converted data and write the app
            results to a file or database.
            """
            _log.debug('Processing Results!')
            for device, point_value_dict in results.devices.items():
                for key, value in point_value_dict.items():
                    _log.debug("COMMAND TABLE: {}->{}".format(key, value))
                    if mode:
                        _log.debug('ACTUATE ON DEVICE.')
                        results, actuator_error = self.actuator_request(
                            results)
                        if not actuator_error:
                            self.actuator_set(results)

            for key, value in results.commands.items():
                _log.debug("COMMAND TABLE: {}->{}".format(key, value))
                if mode:
                    _log.debug('ACTUATE ON DEVICE.')
                    results, actuator_error = self.actuator_request(results)
                    if not actuator_error:
                        self.actuator_set(results)

            for value in results.log_messages:
                _log.debug("LOG: {}".format(value))
            for key, value in results.table_output.items():
                _log.debug("TABLE: {}->{}".format(key, value))
            if output_file_prefix is not None:
                results = self.create_file_output(results)
            if len(results.table_output.keys()):
                results = self.publish_analysis_results(results)
            return results

        def publish_analysis_results(self, results):
            """publish analysis results to the message bus for
            capture by the data historian
            """
            headers = {
                headers_mod.CONTENT_TYPE: headers_mod.CONTENT_TYPE.JSON,
                headers_mod.DATE: str(self.received_input_datetime),
            }
            for app, analysis_table in results.table_output.items():
                try:
                    name_timestamp = app.split('&')
                    _name = name_timestamp[0]
                    timestamp = name_timestamp[1]
                except (IndexError, AttributeError):
                    _name = app
                    timestamp = str(self.received_input_datetime)
                headers = {
                    headers_mod.CONTENT_TYPE: headers_mod.CONTENT_TYPE.JSON,
                    headers_mod.DATE: timestamp,
                }
                for entry in analysis_table:
                    for key, value in entry.items():
                        for _device in command_devices:
                            analysis['unit'] = _device
                            analysis_topic = topics.ANALYSIS_VALUE(point=key,
                                                                   **analysis)
                            datatype = 'float'
                            if isinstance(value, int):
                                datatype = 'int'
                            kbase = key[key.rfind('/') + 1:]
                            message = [{
                                kbase: value
                            }, {
                                kbase: {
                                    'tz': 'US/Pacific',
                                    'type': datatype,
                                    'units': 'float',
                                }
                            }]
                            self.vip.pubsub.publish('pubsub', analysis_topic,
                                                    headers, message)
            return results

        def create_file_output(self, results):
            """Create results/data files for testing and algorithm validation."""
            for key, value in results.table_output.items():
                name_timestamp = key.split('&')
                _name = name_timestamp[0]
                timestamp = name_timestamp[1]
                file_name = output_file_prefix + "-" + _name + ".csv"
                if file_name not in self.file_creation_set:
                    self._header_written = False
                self.file_creation_set.update([file_name])
                for row in value:
                    with open(file_name, 'a+') as file_to_write:
                        row.update({'Timestamp': timestamp})
                        _keys = row.keys()
                        file_output = csv.DictWriter(file_to_write, _keys)
                        if not self._header_written:
                            file_output.writeheader()
                            self._header_written = True
                        file_output.writerow(row)
            return results

        def actuator_request(self, results):
            """Make actuaor request for modification of device set points."""
            _now = dt.now()
            str_now = _now.strftime(DATE_FORMAT)
            _end = _now + td(minutes=1)
            str_end = _end.strftime(DATE_FORMAT)
            for _device in command_devices:
                actuation_device = base_actuator_path(unit=_device, point='')
                schedule_request = [[actuation_device, str_now, str_end]]
                #
                #                 try:
                #                     result = self.vip.rpc.call('platform.actuator',
                #                                                'request_new_schedule',
                #                                                agent_id, _device, 'HIGH',
                #                                                schedule_request).get(timeout=4)
                #                 except RemoteError as ex:
                #                     _log.warning("Failed to schedule device {} (RemoteError): {}".format(_device, str(ex)))
                #                     request_error = True
                #
                #                 if result['result'] == 'FAILURE':
                #                     _log.warn('Failed to schedule device (unavailable) ' + _device)
                #                     request_error = True
                #                 else:
                #                     request_error = False
                #                 _log.debug('@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@')
                #                 _log.debug(str(result))
                #                 _log.debug(str_now)
                #                 _log.debug(str_end)

                headers = {
                    'type': 'NEW_SCHEDULE',
                    'requesterID': agent_id,
                    'taskID': actuator_id,
                    'priority': 'HIGH'
                }
                device_path = "{campus}/{building}/".format(**campus_building)

                self.vip.pubsub.publish(
                    peer='pubsub',
                    topic=topics.ACTUATOR_SCHEDULE_REQUEST(),
                    headers=headers,
                    message=[[device_path + _device, str_now, str_end]])
            #

            return results, False

        def actuator_set(self, results):
            """Set point on device."""
            for device, point_value_dict in results.devices.items():
                for point, value in point_value_dict.items():
                    point_path = base_actuator_path(unit=device, point=point)
                    try:
                        #                         result = self.vip.rpc.call('platform.actuator', 'set_point',
                        #                                                    agent_id, point_path,
                        #                                                    new_value).get(timeout=4)
                        headers = {
                            'Content-Type': 'text/plain',
                            'requesterID': agent_id,
                        }
                        self.vip.pubsub.publish(peer="pubsub",
                                                topic=topics.ACTUATOR_SET(
                                                    point=point,
                                                    unit=device,
                                                    **campus_building),
                                                headers=headers,
                                                message=str(value))
                        _log.debug("Set point {} to {}".format(
                            point_path, value))
                    except RemoteError as ex:
                        _log.warning("Failed to set {} to {}: {}".format(
                            point_path, value, str(ex)))
                        continue

            for _device in command_devices:
                for point, new_value in results.commands.items():
                    point_path = base_actuator_path(unit=_device, point=point)
                    try:
                        #                         result = self.vip.rpc.call('platform.actuator', 'set_point',
                        #                                                    agent_id, point_path,
                        #                                                    new_value).get(timeout=4)
                        headers = {
                            'Content-Type': 'text/plain',
                            'requesterID': agent_id,
                        }
                        self.vip.pubsub.publish(peer="pubsub",
                                                topic=topics.ACTUATOR_SET(
                                                    point=point,
                                                    unit=_device,
                                                    **campus_building),
                                                headers=headers,
                                                message=str(new_value))
                        _log.debug("Set point {} to {}".format(
                            point_path, new_value))
                    except RemoteError as ex:
                        _log.warning("Failed to set {} to {}: {}".format(
                            point_path, new_value, str(ex)))
                        continue

    DrivenAgent.__name__ = 'DrivenLoggerAgent'
    return DrivenAgent(**kwargs)
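
In publish_analysis_results above, each analysis value is published as the usual
two-part VOLTTRON payload: a dict of readings followed by per-point metadata. A
minimal sketch of how that payload is assembled (the point name and value are made
up, and the real topic comes from topics.ANALYSIS_VALUE, which is not reproduced
here):

def build_analysis_message(key, value, tz='US/Pacific'):
    # Mirror the datatype selection above: ints are tagged 'int', everything else 'float'.
    datatype = 'int' if isinstance(value, int) else 'float'
    kbase = key[key.rfind('/') + 1:]
    return [{kbase: value},
            {kbase: {'tz': tz, 'type': datatype, 'units': 'float'}}]

# Example: a hypothetical diagnostic result for one point.
print(build_analysis_message('AHU1/FanSpeedDiagnostic', 21.3))
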
Example #6
0
def driven_agent(config_path, **kwargs):
    """
    Reads agent configuration and converts it to run driven agent.
    :param config_path:
    :param kwargs:
    :return:
    """
    config = utils.load_config(config_path)
    arguments = config.get("arguments")

    application = config.get("application")
    analysis_name = config.get("analysis_name", "analysis_name")
    # application_name = config.get("pretty_name", analysis_name)
    arguments.update({"analysis_name": analysis_name})

    actuation_mode = True if config.get("actuation_mode",
                                        "PASSIVE") == "ACTIVE" else False
    actuator_lock_required = config.get("require_actuator_lock", False)
    interval = config.get("interval", 60)
    vip_destination = config.get("vip_destination", None)
    timezone = config.get("local_timezone", "US/Pacific")
    device_lock_duration = config.get("device_lock_duration", 10.0)
    conversion_map = config.get("conversion_map")
    missing_data_threshold = config.get("missing_data_threshold", 90.0)

    device = config["device"]
    validation_error = ""
    if not application:
        validation_error = "Invalid application specified in config\n"
    if validation_error:
        _log.error(validation_error)
        raise ValueError(validation_error)

    converter = ConversionMapper()
    # output_file_prefix = config.get("output_file")

    klass = _get_class(application)
    # This instance is used to call the application's run method when
    # data comes in on the message bus.  It is constructed here so that
    # the application can keep its state each time run is called.
    # points = arguments.pop("point_mapping")
    app_instance = klass(**arguments)

    class DrivenAgent(Agent):
        """Agent listens to message bus device and runs when data is published.
        """
        def __init__(self,
                     device,
                     actuation_mode=False,
                     actuator_lock_required=False,
                     interval=60,
                     vip_destination=None,
                     timezone="US/Pacific",
                     device_lock_duration=10.0,
                     conversion_map=None,
                     missing_data_threshold=90.0,
                     **kwargs):
            """
            Initializes agent
            :param kwargs: Any driver specific parameters"""

            super(DrivenAgent, self).__init__(**kwargs)

            self.sites_config_list = []
            self.device_topic_dict = {}
            self.site_topic_dict = {}

            self.default_config = {
                "device": device,
                "actuation_mode": actuation_mode,
                "actuator_lock_required": actuator_lock_required,
                "interval": interval,
                "vip_destination": vip_destination,
                "timezone": timezone,
                "device_lock_duration": device_lock_duration,
                "conversion_map": conversion_map,
                "missing_data_threshold": missing_data_threshold
            }
            self.vip.config.set_default("config", self.default_config)
            self.vip.config.subscribe(self.configure_main,
                                      actions=["NEW", "UPDATE"],
                                      pattern="config")
            self.vip.config.subscribe(self.update_driver,
                                      actions=["NEW", "UPDATE"],
                                      pattern="devices/*")
            self.vip.config.subscribe(self.remove_driver,
                                      actions="DELETE",
                                      pattern="devices/*")

            # master is where we copy from to get a poppable list of
            # subdevices that should be present before we run the analysis.
            self.received_input_datetime = None

            self._header_written = False
            self.file_creation_set = set()
            self.initialize_time = None

        def configure_main(self, config_name, action, contents):
            config = self.default_config.copy()
            config.update(contents)
            _log.info("configure_main with {}".format(config))

            self.unsubscribe_from_all_devices()

            self.actuation_mode = True if config.get(
                "actuation_mode", "PASSIVE") == "ACTIVE" else False
            self.actuator_lock_required = config.get("require_actuator_lock",
                                                     False)
            self.interval = config.get("interval", 60)
            self.vip_destination = config.get("vip_destination", None)
            self.timezone = config.get("local_timezone", "US/Pacific")
            self.device_lock_duration = config.get("device_lock_duration",
                                                   10.0)
            self.conversion_map = config.get("conversion_map")
            self.missing_data_threshold = config.get("missing_data_threshold",
                                                     50.0) / 100.0

            self.actuation_vip = self.vip.rpc
            if self.vip_destination:
                self.agent = setup_remote_actuation(self.vip_destination)
                self.actuation_vip = self.agent.vip.rpc

            self.map_names = {}
            if self.conversion_map:
                for key, value in self.conversion_map.items():
                    self.map_names[key.lower(
                    ) if isinstance(key, str) else key] = value

            _log.info("--- actuation_mode {}".format(self.actuation_mode))
            _log.info("--- require_actuator_lock {}".format(
                self.actuator_lock_required))
            _log.info("--- interval {}".format(self.interval))
            _log.info("--- vip_destination {}".format(self.vip_destination))
            _log.info("--- local_timezone {}".format(self.timezone))
            _log.info("--- device_lock_duration {}".format(
                self.device_lock_duration))
            _log.info("--- missing_data_threshold {}".format(
                self.missing_data_threshold))
            _log.info("--- conversion_map {}".format(self.conversion_map))
            _log.info("--- map_names {}".format(self.map_names))

            self.sites = config["device"]
            if not isinstance(self.sites, list):
                self.sites = [self.sites]

            self.sites_config_list = []
            self.site_topic_dict = {}
            self.device_topic_dict = {}

            for site in self.sites:
                campus = site.get("campus", "")
                building = site.get("building", "")
                site_name = "/".join([campus, building])
                publish_base = "/".join([analysis_name, campus, building])

                device_config = site["unit"]
                multiple_devices = isinstance(device_config, dict)
                command_devices = device_config.keys()
                site_device_topic_dict = {}
                device_topic_list = []
                subdevices_list = []

                base_actuator_path = topics.RPC_DEVICE_PATH(campus=campus,
                                                            building=building,
                                                            unit=None,
                                                            path="",
                                                            point=None)

                site_dict = {
                    'site_name': site_name,
                    'publish_base': publish_base,
                    'multiple_devices': multiple_devices,
                    'device_topic_dict': site_device_topic_dict,
                    'device_topic_list': device_topic_list,
                    'subdevices_list': subdevices_list,
                    'command_devices': command_devices,
                    'base_actuator_path': base_actuator_path
                }
                if 'point_mapping' in site:
                    site_dict['point_mapping'] = site['point_mapping']
                self.sites_config_list.append(site_dict)

                for device_name in device_config:
                    device_topic = topics.DEVICES_VALUE(campus=campus,
                                                        building=building,
                                                        unit=device_name,
                                                        path="",
                                                        point="all")

                    self.site_topic_dict.update({device_topic: site_dict})
                    self.device_topic_dict.update({device_topic: device_name})
                    site_device_topic_dict.update({device_topic: device_name})
                    device_topic_list.append(device_name)
                    _log.info("device_topic_list topic {} -> device {}".format(
                        device_topic, device_name))
                    if multiple_devices:
                        for subdevice in device_config[device_name][
                                "subdevices"]:
                            if subdevice not in subdevices_list:
                                subdevices_list.append(subdevice)
                            subdevice_topic = topics.DEVICES_VALUE(
                                campus=campus,
                                building=building,
                                unit=device_name,
                                path=subdevice,
                                point="all")

                            subdevice_name = device_name + "/" + subdevice
                            self.site_topic_dict.update(
                                {subdevice_topic: site_dict})
                            self.device_topic_dict.update(
                                {subdevice_topic: subdevice_name})
                            site_device_topic_dict.update(
                                {subdevice_topic: subdevice_name})
                            device_topic_list.append(subdevice_name)
                            _log.info(
                                "device_topic_list topic {} -> subdev {}".
                                format(subdevice_topic, subdevice_name))
                _log.info("-- Site config {}".format(site_dict))

            self.initialize_devices()
            self.subscribe_to_all_devices()

        def derive_device_topic(self, config_name):
            _, topic = config_name.split('/', 1)
            # remove any #prefix from the config name which is only used to differentiate config keys
            return topic.split('#', 1)[0]

        def derive_device_unit(self, config_name, contents):
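            # The unit used for publishing may be given explicitly in the config
            # contents, or encoded after a '#' suffix in the config name.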
            if 'unit' in contents:
                return contents['unit']
            _, topic = config_name.split('/', 1)
            if '#' in topic:
                return topic.split('#', 1)[1]
            return None

        def update_driver(self, config_name, action, contents):
            topic = self.derive_device_topic(config_name)
            topic_split = topic.split('/', 2)
            if len(topic_split) > 1:
                campus = topic_split[0]
                building = topic_split[1]
            if len(topic_split) > 2:
                unit = topic_split[2]
            else:
                unit = ""
            site_name = "/".join([campus, building])
            publish_base = "/".join([analysis_name, campus, building])
            command_devices = []
            site_device_topic_dict = {}
            device_topic_list = []
            subdevices_list = []

            base_actuator_path = topics.RPC_DEVICE_PATH(campus=campus,
                                                        building=building,
                                                        unit=None,
                                                        path="",
                                                        point=None)

            site_dict = {
                'site_name': site_name,
                'publish_base': publish_base,
                'multiple_devices': False,
                'device_topic_dict': site_device_topic_dict,
                'device_topic_list': device_topic_list,
                'subdevices_list': subdevices_list,
                'command_devices': command_devices,
                'base_actuator_path': base_actuator_path
            }
            if 'point_mapping' in contents:
                site_dict['point_mapping'] = contents['point_mapping']
            if not unit:
                # lookup the subdevices from point_mapping
                for point in contents['point_mapping'].keys():
                    # remove the point name to get the subdevice
                    subdevice_name = point.rsplit('/', 1)[0]
                    sd_split = subdevice_name.rsplit('/', 1)
                    device_name = sd_split[0]
                    subdevice = ''
                    if len(sd_split) > 1:
                        subdevice = sd_split[1]
                    if subdevice not in subdevices_list:
                        subdevices_list.append(subdevice)
                        command_devices.append(subdevice)
                    subdevice_topic = topics.DEVICES_VALUE(campus=campus,
                                                           building=building,
                                                           unit=device_name,
                                                           path=subdevice,
                                                           point="all")
                    self.site_topic_dict.update({subdevice_topic: site_dict})
                    self.device_topic_dict.update(
                        {subdevice_topic: subdevice_name})
                    site_device_topic_dict.update(
                        {subdevice_topic: subdevice_name})
                    device_topic_list.append(subdevice_name)
                    _log.info("device_topic_list topic {} -> subdev {}".format(
                        subdevice_topic, subdevice_name))

            self.sites_config_list.append(site_dict)
            device_topic = topics.DEVICES_VALUE(campus=campus,
                                                building=building,
                                                unit=unit,
                                                path="",
                                                point="all")

            if device_topic in self.device_topic_dict:
                self.unsubscribe_from_device(device_topic)

            self.site_topic_dict.update({device_topic: site_dict})
            if unit:
                self.device_topic_dict.update({device_topic: unit})
                site_device_topic_dict.update({device_topic: unit})
                device_topic_list.append(unit)
                command_devices.append(unit)

            # overrides the publishing unit topic, which is needed for split topics
            override_unit = self.derive_device_unit(config_name, contents)
            if override_unit:
                del command_devices[:]
                command_devices.append(override_unit)

            _log.info("device_topic_list topic {} -> device {}".format(
                device_topic, unit))
            self.initialize_device(site_dict)
            _log.info("-- Site config {}".format(site_dict))
            for dt in site_device_topic_dict.keys():
                self.subscribe_to_device(dt)

        def remove_driver(self, config_name, action, contents):
            topic = self.derive_device_topic(config_name)
            topic_split = topic.split('/', 2)
            if len(topic_split) > 1:
                campus = topic_split[0]
                building = topic_split[1]
            if len(topic_split) > 2:
                unit = topic_split[2]
            else:
                unit = ""
            device_topic = topics.DEVICES_VALUE(campus=campus,
                                                building=building,
                                                unit=unit,
                                                path="",
                                                point="all")

            self.site_topic_dict.pop(device_topic, None)
            self.device_topic_dict.pop(device_topic, None)
            self.unsubscribe_from_device(device_topic)

        def initialize_devices(self):
            for site in self.sites_config_list:
                self.initialize_device(site)

        def initialize_device(self, site):
            _log.info("initialize_device {}".format(site))
            site['needed_devices'] = site['device_topic_list'][:]
            if 'device_values' in site:
                site['device_values'].clear()
            else:
                site['device_values'] = {}

        @Core.receiver("onstart")
        def startup(self, sender, **kwargs):
            """
            Starts up the agent and subscribes to device topics
            based on agent configuration.
            :param sender:
            :param kwargs: Any driver specific parameters
            :type sender: str
            """
            pass

        def unsubscribe_from_device(self, device):
            _log.info("Unsubscribing to " + device)
            self.vip.pubsub.unsubscribe(peer="pubsub",
                                        prefix=device,
                                        callback=self.on_analysis_message)

        def unsubscribe_from_all_devices(self):
            for device in self.device_topic_dict:
                self.unsubscribe_from_device(device)

        def subscribe_to_device(self, device):
            _log.info("Subscribing to " + device)
            self.vip.pubsub.subscribe(peer="pubsub",
                                      prefix=device,
                                      callback=self.on_analysis_message)

        def subscribe_to_all_devices(self):
            for device in self.device_topic_dict:
                self.subscribe_to_device(device)

        def _should_run_now(self, topic):
            """
            Checks if messages from all the devices are received
                before running application
            :returns: True or False based on received messages.
            :rtype: boolean
            """
            # Assumes the unit/all values will have values.
            _log.info("_should_run_now topic {} ".format(topic))
            site = self.site_topic_dict[topic]
            device_values = site['device_values']
            _log.info("_should_run_now check device_values {} ".format(
                device_values))
            if not device_values.keys():
                _log.info("_should_run_now FALSE")
                return False
            needed_devices = site['needed_devices']
            _log.info("_should_run_now check needed_devices {} ".format(
                needed_devices))
            return not needed_devices

        def aggregate_subdevice(self, device_data, topic):
            """
            Aggregates device and subdevice data for application
            :returns: True or False based on if device data is needed.
            :rtype: boolean"""
            result = True
            tagged_device_data = {}
            device_tag = self.device_topic_dict[topic]
            site = self.site_topic_dict[topic]
            needed_devices = site['needed_devices']
            device_values = site['device_values']
            _log.info(
                "Current device to aggregate: topic {} device: {}".format(
                    topic, device_tag))
            if device_tag not in needed_devices:
                result = False
            # optional eg: 'SomeFanSpeed' -> 'supply_fan_speed'
            mappings = site.get('point_mapping', {})
            _log.info("--- device_data -> {}".format(device_data))
            _log.info("--- mappings -> {}".format(mappings))
            for key, value in device_data.items():
                # weird ... bug
                if key.endswith(device_tag):
                    _log.warning(
                        "--- weird entry in device_data ? {} -> {}".format(
                            key, value))
                    _log.warning("--- device_tag ? {}".format(device_tag))
                    key = key[:-len(device_tag) - 1]

                # here do the mapping between the actual device topic
                # and the APP expected topic names
                k = key
                if key in mappings:
                    k = mappings[key]
                else:
                    long_key = '/'.join([device_tag, key])
                    if long_key in mappings:
                        k = mappings[long_key]

                device_data_tag = "&".join([k, device_tag])
                tagged_device_data[device_data_tag] = value
            _log.info(
                "--- tagged_device_data -> {}".format(tagged_device_data))
            device_values.update(tagged_device_data)
            _log.info("--- device_values -> {}".format(device_values))
            if device_tag in needed_devices:
                needed_devices.remove(device_tag)
                _log.info("--- needed_devices removed [{}] -> {}".format(
                    device_tag, needed_devices))
            return result

        def on_analysis_message(self, peer, sender, bus, topic, headers,
                                message):
            """
            Subscribe to device data and assemble data set to pass
                to applications.
            :param peer:
            :param sender: device name
            :param bus:
            :param topic: device path topic
            :param headers: message headers
            :param message: message containing points and values dict
                    from device with point type
            :type peer: str
            :type sender: str
            :type bus: str
            :type topic: str
            :type headers: dict
            :type message: dict
            """
            _log.info("on_analysis_message: from device {} topic -> {}".format(
                sender, topic))
            _log.info("on_analysis_message: {} -> {}".format(headers, message))
            site = self.site_topic_dict.get(topic)
            if not site:
                _log.error("No Site configured for topic: {}".format(topic))
                return

            needed_devices = site['needed_devices']
            device_values = site['device_values']
            master_devices = site['device_topic_list']

            timestamp = parse(headers.get("Date"))
            missing_but_running = False
            if self.initialize_time is None and len(master_devices) > 1:
                self.initialize_time = self.find_reinitialize_time(timestamp)

            if self.initialize_time is not None and timestamp < self.initialize_time:
                if len(master_devices) > 1:
                    _log.info(
                        "on_analysis_message: waiting until initialize_time: {}"
                        .format(self.initialize_time))
                    return

            to_zone = dateutil.tz.gettz(self.timezone)
            timestamp = timestamp.astimezone(to_zone)
            self.received_input_datetime = timestamp
            _log.info(
                "on_analysis_message: Current time of publish: {}".format(
                    timestamp))

            device_data = message[0]
            if isinstance(device_data, list):
                device_data = device_data[0]

            device_needed = self.aggregate_subdevice(device_data, topic)
            if not device_needed:
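                # A duplicate publish arrived before the device set was complete.
                # If too large a fraction of devices is still missing, reinitialize
                # and wait; otherwise run the diagnostic with the data on hand.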
                fraction_missing = float(
                    len(needed_devices)) / len(master_devices)
                _log.warning(
                    "on_analysis_message: No device_needed: {} fraction_missing = {}"
                    .format(topic, fraction_missing))
                if fraction_missing > self.missing_data_threshold:
                    _log.error(
                        "on_analysis_message: Device values already present, reinitializing at publish: {}"
                        .format(timestamp))
                    self.initialize_device(site)
                    device_needed = self.aggregate_subdevice(
                        device_data, topic)
                    return
                missing_but_running = True
                _log.warning(
                    "on_analysis_message: Device already present. Using available data for diagnostic.: {}"
                    .format(timestamp))
                _log.warning(
                    "on_analysis_message: Device already present - topic: {}".
                    format(topic))
                _log.warning("on_analysis_message: All devices: {}".format(
                    master_devices))
                _log.warning("on_analysis_message: Needed devices: {}".format(
                    needed_devices))

            srn = self._should_run_now(topic)
            _log.info("on_analysis_message: _should_run_now {} or {}".format(
                srn, missing_but_running))
            if srn or missing_but_running:
                field_names = {}
                _log.info(
                    "on_analysis_message: Running for topic {}".format(topic))
                for point, data in device_values.items():
                    _log.info("on_analysis_message: --- point, data: {} -> {}".
                              format(point, data))
                    field_names[point] = data
                if not converter.initialized and conversion_map is not None:
                    converter.setup_conversion_map(self.map_names, field_names)

                results = app_instance.run(timestamp,
                                           converter.process_row(field_names))
                self.process_results(site, results)
                self.initialize_device(site)
                if missing_but_running:
                    device_needed = self.aggregate_subdevice(
                        device_data, topic)
            else:
                _log.info("on_analysis_message: Still need {} before running.".
                          format(needed_devices))

        def process_results(self, site, results):
            """
            Runs driven application with converted data. Calls appropriate
                methods to process commands, log and table_data in results.
            :param results: Results object containing commands for devices,
                    log messages and table data.
            :type results: Results object \\volttron.platform.agent.driven
            :returns: Same as results param.
            :rtype: Results object \\volttron.platform.agent.driven
            """
            _log.info("Processing Results!")
            actuator_error = False
            if self.actuation_mode:
                if results.devices and self.actuator_lock_required:
                    actuator_error = self.actuator_request(
                        site, results.devices)
                elif results.commands and self.actuator_lock_required:
                    actuator_error = self.actuator_request(
                        site, site['command_devices'])
                if not actuator_error:
                    results = self.actuator_set(site, results)
            for log in results.log_messages:
                _log.info("LOG: {}".format(log))
            for key, value in results.table_output.items():
                _log.info("TABLE: {}->{}".format(key, value))
            # if output_file_prefix is not None:
            #   results = self.create_file_output(results)
            if len(results.table_output.keys()):
                results = self.publish_analysis_results(site, results)
            return results

        def publish_analysis_results(self, site, results):
            """
            Publish table_data in analysis results to the message bus for
                capture by the data historian.

            :param results: Results object containing commands for devices,
                    log messages and table data.
            :type results: Results object \\volttron.platform.agent.driven
            :returns: Same as results param.
            :rtype: Results object \\volttron.platform.agent.driven
            """
            to_publish = defaultdict(dict)
            for app, analysis_table in results.table_output.items():
                try:
                    name_timestamp = app.split("&")
                    timestamp = name_timestamp[1]
                except (IndexError, AttributeError):
                    timestamp = self.received_input_datetime
                    timestamp = format_timestamp(timestamp)

                headers = {
                    headers_mod.CONTENT_TYPE: headers_mod.CONTENT_TYPE.JSON,
                    headers_mod.DATE: timestamp,
                }
                for entry in analysis_table:
                    for point, result in entry.items():
                        for device in site['command_devices']:
                            publish_topic = "/".join(
                                [site['publish_base'], device, point])
                            analysis_topic = topics.RECORD(
                                subtopic=publish_topic)
                            # datatype = str(type(value))
                            to_publish[analysis_topic] = result

                for result_topic, result in to_publish.items():
                    self.vip.pubsub.publish("pubsub", result_topic, headers,
                                            result)
                to_publish.clear()
            return results

        def create_file_output(self, results):
            """
            Create results/data files for testing and algorithm validation
            if table data is present in the results.

            :param results: Results object containing commands for devices,
                    log messages and table data.
            :type results: Results object \\volttron.platform.agent.driven
            :returns: Same as results param.
            :rtype: Results object \\volttron.platform.agent.driven"""
            tag = 0
            for key, value in results.table_output.items():
                for row in value:
                    name_timestamp = key.split("&")
                    _name = name_timestamp[0]
                    timestamp = name_timestamp[1]
                    file_name = _name + str(tag) + ".csv"
                    tag += 1
                    if file_name not in self.file_creation_set:
                        self._header_written = False
                    self.file_creation_set.update([file_name])
                    with open(file_name, "a+") as file_to_write:
                        row.update({"Timestamp": timestamp})
                        _keys = row.keys()
                        file_output = csv.DictWriter(file_to_write, _keys)
                        if not self._header_written:
                            file_output.writeheader()
                            self._header_written = True
                        file_output.writerow(row)
            return results

        def actuator_request(self, site, command_equip):
            """
            Calls the actuator"s request_new_schedule method to get
                    device schedule
            :param command_equip: contains the names of the devices
                that will be scheduled with the ActuatorAgent.
            :type command_equip: dict or list
            :returns: True if an error occurred while scheduling the device,
                otherwise False.
            :rtype: boolean
            :Return Values:

                request_error = True/False

            .. warning:: Calling without previously scheduling a device, or outside
                         the allotted time, will raise a LockError."""

            _now = get_aware_utc_now()
            str_now = format_timestamp(_now)
            _end = _now + td(minutes=self.device_lock_duration)
            str_end = format_timestamp(_end)
            for device in command_equip:
                actuation_device = site['base_actuator_path'](unit=device,
                                                              point="")
                schedule_request = [[actuation_device, str_now, str_end]]
                try:
                    _log.info("Make Request {} for start {} and end {}".format(
                        actuation_device, str_now, str_end))
                    result = self.actuation_vip.call(
                        "platform.actuator", "request_new_schedule", "rcx",
                        actuation_device, "HIGH",
                        schedule_request).get(timeout=15)
                except RemoteError as ex:
                    _log.warning(
                        "Failed to schedule device {} (RemoteError): {}".
                        format(device, str(ex)))
                    request_error = True
                    continue
                if result["result"] == "FAILURE":
                    if result["info"] == "TASK_ID_ALREADY_EXISTS":
                        _log.info("Task to schedule device already exists " +
                                  device)
                        request_error = False
                    else:
                        _log.warning(
                            "Failed to schedule device (unavailable) " +
                            device)
                        request_error = True
                else:
                    request_error = False

            return request_error

        def actuator_set(self, site, results):
            """
            Calls the actuator"s set_point method to set point on device

            :param results: Results object containing commands for devices,
                    log messages and table data.
            :type results: Results object \\volttron.platform.agent.driven"""
            def make_actuator_set(device, point_value_dict):
                for point, new_value in point_value_dict.items():
                    point_path = site['base_actuator_path'](unit=device,
                                                            point=point)
                    try:
                        _log.info("Set point {} to {}".format(
                            point_path, new_value))
                        self.actuation_vip.call("platform.actuator",
                                                "set_point", "rcx", point_path,
                                                new_value).get(timeout=15)
                    except RemoteError as ex:
                        _log.warning("Failed to set {} to {}: {}".format(
                            point_path, new_value, str(ex)))
                        continue

            for device, point_value_dict in results.devices.items():
                make_actuator_set(device, point_value_dict)

            for device in site['command_devices']:
                make_actuator_set(device, results.commands)
            return results

        def find_reinitialize_time(self, current_time):
            midnight = current_time.replace(hour=0,
                                            minute=0,
                                            second=0,
                                            microsecond=0)
            seconds_from_midnight = (current_time - midnight).total_seconds()
            offset = seconds_from_midnight % self.interval
            previous_in_seconds = seconds_from_midnight - offset
            next_in_seconds = previous_in_seconds + self.interval
            from_midnight = td(seconds=next_in_seconds)
            _log.debug(
                "Start of next scrape interval: {}".format(midnight +
                                                           from_midnight))
            return midnight + from_midnight

    def setup_remote_actuation(vip_destination):
        event = gevent.event.Event()
        agent = Agent(address=vip_destination)
        gevent.spawn(agent.core.run, event)
        event.wait(timeout=15)
        return agent

    DrivenAgent.__name__ = "DrivenLoggerAgent"
    return DrivenAgent(device, actuation_mode, actuator_lock_required,
                       interval, vip_destination, timezone,
                       device_lock_duration, conversion_map,
                       missing_data_threshold, **kwargs)
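
For reference, an illustrative configuration for the config-store variant above.
Every campus, building, device, application, and point name here is made up, and the
conversion_map pattern syntax is only schematic; the keys themselves mirror what
driven_agent and configure_main actually read:

import json

example_config = {
    "application": "economizer.economizer_rcx.Application",  # hypothetical driven app
    "analysis_name": "Economizer_AIRCx",
    "arguments": {"device_type": "ahu"},
    "actuation_mode": "PASSIVE",
    "require_actuator_lock": False,
    "interval": 60,
    "local_timezone": "US/Pacific",
    "device_lock_duration": 10.0,
    "missing_data_threshold": 90.0,
    "conversion_map": {".*Temperature.*": "float", ".*Command.*": "int"},
    "device": {
        "campus": "CAMPUS",
        "building": "BUILDING",
        "unit": {"AHU1": {"subdevices": ["VAV101", "VAV102"]}},
        "point_mapping": {"AHU1/SupplyFanStatus": "supply_fan_status"}
    }
}

print(json.dumps(example_config, indent=4))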