Example #1
File: omf.py Project: m0fff/FogLAMP
def plugin_init(data):
    """ Initializes the OMF plugin for the sending of blocks of readings to the PI Connector.
    Args:
    Returns:
    Raises:
        PluginInitializeFailed
    Todo:
    """
    global _config
    global _config_omf_types
    global _config_omf_types_from_manager
    global _logger
    global _recreate_omf_objects

    try:
        # note: _MODULE_NAME is used because __name__ refers to the Sending Process
        logger_name = _MODULE_NAME + "_" + str(data['stream_id']['value'])
        _logger = logger.setup(logger_name) if _log_debug_level == 0 else \
                  logger.setup(logger_name, level=logging.INFO if _log_debug_level == 1 else logging.DEBUG)
    except Exception as ex:
        _logger.error("{0} - ERROR - {1}".format(time.strftime("%Y-%m-%d %H:%M:%S:"), plugin_common.MESSAGES_LIST["e000012"].format(str(ex))))
        raise ex
    _logger.debug("{0} - ".format("plugin_info"))

    # Retrieves the configuration and applies the related conversions
    _config['_CONFIG_CATEGORY_NAME'] = data['_CONFIG_CATEGORY_NAME']
    _config['URL'] = data['URL']['value']
    _config['producerToken'] = data['producerToken']['value']
    _config['OMFMaxRetry'] = int(data['OMFMaxRetry']['value'])
    _config['OMFRetrySleepTime'] = int(data['OMFRetrySleepTime']['value'])
    _config['OMFHttpTimeout'] = int(data['OMFHttpTimeout']['value'])
    _config['StaticData'] = ast.literal_eval(data['StaticData']['value'])
    # TODO: compare instance fetching via inspect vs as param passing
    # import inspect
    # _config['sending_process_instance'] = inspect.currentframe().f_back.f_locals['self']
    _config['sending_process_instance'] = data['sending_process_instance']

    # _config_omf_types = json.loads(data['omf_types']['value'])
    _config_omf_types = _config['sending_process_instance']._fetch_configuration(cat_name=_CONFIG_CATEGORY_OMF_TYPES_NAME,
                                                                      cat_desc=_CONFIG_CATEGORY_OMF_TYPES_DESCRIPTION,
                                                                      cat_config=_CONFIG_DEFAULT_OMF_TYPES,
                                                                      cat_keep_original=True)

    # Converts the value field from str to a dict
    for item in _config_omf_types:
        if _config_omf_types[item]['type'] == 'JSON':
            # The dict-to-str conversion lowercases booleans ('true'); restore 'True' so ast.literal_eval can parse the value
            value = _config_omf_types[item]['value'].replace("true", "True")
            new_value = ast.literal_eval(value)
            _config_omf_types[item]['value'] = new_value

    _logger.debug("{0} - URL {1}".format("plugin_init", _config['URL']))
    try:
        _recreate_omf_objects = True
    except Exception as ex:
        _logger.error(plugin_common.MESSAGES_LIST["e000011"].format(ex))
        raise plugin_exceptions.PluginInitializeFailed(ex)
    return _config
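For reference, a minimal sketch of the data dictionary that plugin_init reads above. The key names mirror the accesses in the function body; the concrete values and the sending_process_instance placeholder are illustrative assumptions only.

# Illustrative only: keys taken from plugin_init above, values are assumptions
example_data = {
    '_CONFIG_CATEGORY_NAME': 'SEND_PR_1',                          # assumed category name
    'stream_id': {'value': 1},
    'URL': {'value': 'https://pi-server:5460/ingress/messages'},   # assumed endpoint
    'producerToken': {'value': 'omf_translator_0001'},             # assumed token
    'OMFMaxRetry': {'value': '3'},
    'OMFRetrySleepTime': {'value': '1'},
    'OMFHttpTimeout': {'value': '10'},
    'StaticData': {'value': "{'Location': 'Palo Alto'}"},          # parsed with ast.literal_eval
    'sending_process_instance': None,                              # the SendingProcess instance in real use
}
# _config = plugin_init(example_data)   # would also require the module-level globals to be set up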
Example #2
 def test_logger_destination_error(self):
     """
     Test that ValueError is raised when destination isn't 0 or 1
     :assert:
         Assert ValueError is raised when destination=2
     """
     with pytest.raises(ValueError) as error_exec:
         logger.setup(__name__, destination=2)
     assert "ValueError: Invalid destination 2" in str(error_exec)
Example #3
    def __init__(self, _storage):
        self._storage = _storage

        if not Restore._logger:
            Restore._logger = logger.setup(self._MODULE_NAME,
                                           destination=_LOGGER_DESTINATION,
                                           level=_LOGGER_LEVEL)
Example #4
    def __init__(self):

        super().__init__()

        if not self._logger:
            self._logger = logger.setup(self._MODULE_NAME,
                                        destination=_LOGGER_DESTINATION,
                                        level=_LOGGER_LEVEL)

        # Handles the Restore command line parameters
        try:
            self._backup_id = super().get_arg_value("--backup-id")
            self._file_name = super().get_arg_value("--file")
        except Exception as _ex:

            _message = _MESSAGES_LIST["e000003"].format(_ex)
            _logger.exception(_message)

            raise exceptions.ArgumentParserError(_message)

        self._restore_lib = lib.BackupRestoreLib(self._storage, self._logger)

        self._job = lib.Job()

        self._force_restore = True
        """ Restore a backup doesn't exist in the backups table """

        # Creates the objects references used by the library
        lib._logger = self._logger
        lib._storage = self._storage
Example #5
 def test_logger_type(self):
     """
     Test the logger type being returned at setup
     :assert:
        Assert that setup returns value of type logger.Logger
     """
     assert isinstance(logger.setup(__name__), logging.Logger) is True
Example #6
async def get_backups(request):
    """
    Returns a list of all backups

    :Example: curl -X GET  http://localhost:8082/foglamp/backup
    :Example: curl -X GET  http://localhost:8082/foglamp/backup?limit=2&skip=1&status=complete
    """
    try:
        limit = int(
            request.query['limit']) if 'limit' in request.query else None
        skip = int(request.query['skip']) if 'skip' in request.query else None
        status = request.query['status'] if 'status' in request.query else None
        # TODO : Fix after actual implementation
        Backup.get_backup_list.return_value = [{
            'id': 28,
            'date': '2017-08-30 04:05:10.382',
            'status': 'running'
        }, {
            'id': 27,
            'date': '2017-08-29 04:05:13.392',
            'status': 'failed'
        }, {
            'id': 26,
            'date': '2017-08-28 04:05:08.201',
            'status': 'complete'
        }]

        # backup_json = [{"id": b[0], "date": b[1], "status": b[2]}
        #                for b in Backup.get_backup_list(limit=limit, skip=skip, status=status)]
        backup_json = Backup.get_backup_list(limit=limit,
                                             skip=skip,
                                             status=status)

        # ##  Test #########################################################################################:

        _logger = logger.setup("BACKUP-API-TEST",
                               destination=logger.SYSLOG,
                               level=logging.DEBUG)

        _logger.info("get_backups - START 3 ")

        _storage = connect.get_storage()
        backup = backup_postgres.Backup(_storage)
        backup_json = backup.get_all_backups(999, 0, None)

        _logger.debug("get_backups - END ")

        _logger.handlers = []
        _logger.removeHandler(_logger.handle)
        _logger = None

        del backup

        # ##  #########################################################################################:

    except Backup.DoesNotExist:
        raise web.HTTPNotFound(
            reason='No backups found for queried parameters')
    return web.json_response({"backups": backup_json})
Example #7
 def test_destination_consule(self):
     """
     Test the logger type being returned when destination=1
     :assert:
         Assert that the setup returns value of type logging.Logger
     """
     assert isinstance(logger.setup(__name__, destination=1),
                       logging.Logger) is True
Example #8
 def test_logger_handler(self):
     """
     Test that handler has been added
     :assert: 
         Assert hasHandler returns True
     """
     output = logger.setup(__name__)
     assert output.hasHandlers() is True
Example #9
    def __init__(self, _storage):
        self._storage = _storage

        if not Backup._logger:
            Backup._logger = logger.setup(self._MODULE_NAME,
                                          destination=_LOGGER_DESTINATION,
                                          level=_LOGGER_LEVEL)

        self._backup_lib = lib.BackupRestoreLib(self._storage, self._logger)
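The lazy class-level logger initialisation used here also appears in Examples #3, #4, #16 and #17; a generic sketch of the pattern, with placeholder class and module names, is shown below.

# Sketch of the lazy class-level logger pattern; names are placeholders
import logging
from foglamp.common import logger


class Worker:
    _MODULE_NAME = "worker_module"   # placeholder module name
    _logger = None                   # shared by all instances of the class

    def __init__(self):
        # configure the named logger only once, on first instantiation
        if not Worker._logger:
            Worker._logger = logger.setup(self._MODULE_NAME, level=logging.INFO)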
Example #10
    def __init__(self):

        self._logger = logger.setup("SMNTR", level=logging.INFO)

        self._monitor_loop_task = None  # type: asyncio.Task
        """Task for :meth:`_monitor_loop`, to ensure it has finished"""
        self._sleep_interval = None  # type: int
        """The time (in seconds) to sleep between health checks"""
        self._ping_timeout = None  # type: int
        """Timeout for a response from any given micro-service"""
Example #11
    def start(self):
        # Command line parameter handling
        global _log_performance
        global _LOGGER

        # Sets up signal handlers to properly handle termination
        # a) SIGTERM - 15 : kill or system shutdown
        signal.signal(signal.SIGTERM, SendingProcess._signal_handler)

        try:
            self._mgt_name, self._mgt_port, self._mgt_address, self.input_stream_id, self._log_performance, self._log_debug_level = \
                handling_input_parameters()
            _log_performance = self._log_performance

        except Exception as ex:
            message = _MESSAGES_LIST["e000017"].format(str(ex))
            SendingProcess._logger.exception(message)
            sys.exit(1)
        try:
            self._storage = StorageClient(self._mgt_address, self._mgt_port)
            self._readings = ReadingsStorageClient(self._mgt_address, self._mgt_port)
            self._audit = AuditLogger(self._storage)
        except Exception as ex:
            message = _MESSAGES_LIST["e000023"].format(str(ex))
            SendingProcess._logger.exception(message)
            sys.exit(1)
        else:
            # Reconfigures the logger using the Stream ID to differentiate
            # logging from different processes
            SendingProcess._logger.removeHandler(SendingProcess._logger.handle)
            logger_name = _MODULE_NAME + "_" + str(self.input_stream_id)

            SendingProcess._logger = logger.setup(logger_name, destination=_LOGGER_DESTINATION, level=_LOGGER_LEVEL)

            try:
                # Set the debug level
                if self._log_debug_level == 1:
                    SendingProcess._logger.setLevel(logging.INFO)
                elif self._log_debug_level >= 2:
                    SendingProcess._logger.setLevel(logging.DEBUG)

                # Sets the reconfigured logger
                _LOGGER = SendingProcess._logger

                # Start sending
                if self._start(self.input_stream_id):
                    self.send_data(self.input_stream_id)
                # Stop Sending
                self.stop()
                SendingProcess._logger.info(_MESSAGES_LIST["i000002"])
                sys.exit(0)
            except Exception as ex:
                message = _MESSAGES_LIST["e000002"].format(str(ex))
                SendingProcess._logger.exception(message)
                sys.exit(1)
Example #12
    def __init__(self):
        self._logger = logger.setup(__name__, level=20)

        self._monitor_loop_task = None  # type: asyncio.Task
        """Task for :meth:`_monitor_loop`, to ensure it has finished"""
        self._sleep_interval = None  # type: int
        """The time (in seconds) to sleep between health checks"""
        self._ping_timeout = None  # type: int
        """Timeout for a response from any given micro-service"""
        self._max_attempts = None  # type: int
        """Number of max attempts for finding a heartbeat of service"""
Example #13
 def test_logger_level(self):
     """
     Test logger level gets updated
     :assert: 
         Assert that unless i==0, output.getEffectiveLevel() == i
     """
     for i in range(0, 70, 10):
         output = logger.setup(__name__, level=i)
         if i == 0:
             assert output.getEffectiveLevel() == 30
         else:
             assert output.getEffectiveLevel() == i
Example #14
    def test_logger_level(self):
        """ Test logger level gets updated

        :assert:
            Assert that unless i==0, output.getEffectiveLevel() == i
        """
        for i in range(0, 60, 10):
            output = logger.setup(__name__, level=i)
            if i == 0:
                # Level NOTSET (0) so inherits level WARNING (30)
                assert logging.WARNING == output.getEffectiveLevel()
            else:
                assert i == output.getEffectiveLevel()
Example #15
    def test_logger_instance(self):
        """ Test the logger type being returned at setup

        :assert:
           Assert that setup returns instance of type logger.Logger
           Assert instance name
           Assert instance hasHandler
           Assert instance default log level WARNING
        """
        instance = logger.setup(__name__)
        assert isinstance(instance, logging.Logger)
        assert "test_logger" == instance.name
        assert instance.hasHandlers()
        assert logging.WARNING == instance.getEffectiveLevel()
Example #16
    def __init__(self, core_management_host=None, core_management_port=None):
        """Constructor"""

        cls = Scheduler

        # Initialize class attributes
        if not cls._logger:
            cls._logger = logger.setup(__name__, level=20)
            # cls._logger = logger.setup(__name__, destination=logger.CONSOLE, level=logging.DEBUG)
            # cls._logger = logger.setup(__name__, level=logging.DEBUG)
        if not cls._core_management_port:
            cls._core_management_port = core_management_port
        if not cls._core_management_host:
            cls._core_management_host = core_management_host

        # Instance attributes

        self._storage = None

        self._ready = False
        """True when the scheduler is ready to accept API calls"""
        self._start_time = None  # type: int
        """When the scheduler started"""
        self._max_running_tasks = None  # type: int
        """Maximum number of tasks that can execute at any given time"""
        self._paused = False
        """When True, the scheduler will not start any new tasks"""
        self._process_scripts = dict()
        """Dictionary of scheduled_processes.name to script"""
        self._schedules = dict()
        """Dictionary of schedules.id to _ScheduleRow"""
        self._schedule_executions = dict()
        """Dictionary of schedules.id to _ScheduleExecution"""
        self._task_processes = dict()
        """Dictionary of tasks.id to _TaskProcess"""
        self._check_processes_pending = False
        """bool: True when request to run check_processes"""
        self._scheduler_loop_task = None  # type: asyncio.Task
        """Task for :meth:`_scheduler_loop`, to ensure it has finished"""
        self._scheduler_loop_sleep_task = None  # type: asyncio.Task
        """Task for asyncio.sleep used by :meth:`_scheduler_loop`"""
        self.current_time = None  # type: int
        """Time to use when determining when to start tasks, for testing"""
        self._last_task_purge_time = None  # type: int
        """When the tasks table was last purged"""
        self._max_completed_task_age = None  # type: datetime.timedelta
        """Delete finished task rows when they become this old"""
        self._purge_tasks_task = None  # type: asyncio.Task
        """asynico task for :meth:`purge_tasks`, if scheduled to run"""
Example #17
File: monitor.py Project: m0fff/FogLAMP
    def __init__(self):
        """Constructor"""

        cls = Monitor

        # Initialize class attributes
        if not cls._logger:
            cls._logger = logger.setup(__name__)

        self._monitor_loop_task = None  # type: asyncio.Task
        """Task for :meth:`_monitor_loop`, to ensure it has finished"""
        self._sleep_interval = None  # type: int
        """The time (in seconds) to sleep between health checks"""
        self._ping_timeout = None  # type: int
        """Timeout for a response from any given microservice"""
Example #18
 def test_compare_setup(self):
     """
     Test that logger.setup() returns the same logger object as logging.getLogger() for
       levels 10 to 50
       propagate: True or False
     :assert:
         Assert logging.getLogger() and logger.setup return the same value(s)
     """
     for name in (__name__, 'aaa'):
         log = logging.getLogger(name)
         for level in range(10, 60, 10):
             for propagate in (True, False):
                 log.setLevel(level)
                 log.propagate = propagate
                 assert log is logger.setup(
                     name, propagate=propagate, level=level)
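A practical consequence of the identity asserted above, sketched under the assumption that logger.setup() simply configures and returns the standard logging.Logger registered under the given name:

# Sketch: configure once via logger.setup, retrieve the same logger anywhere by name
import logging
from foglamp.common import logger

configured = logger.setup("demo.module", level=logging.DEBUG, propagate=False)
same = logging.getLogger("demo.module")   # standard lookup by name
assert configured is same
assert same.getEffectiveLevel() == logging.DEBUG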
Example #19
    def __init__(self):

        super().__init__()

        if not self._logger:
            self._logger = logger.setup(self._MODULE_NAME,
                                        destination=_LOGGER_DESTINATION,
                                        level=_LOGGER_LEVEL)

        self._backup = Backup(self._storage)
        self._backup_lib = lib.BackupRestoreLib(self._storage, self._logger)

        self._job = lib.Job()

        # Creates the objects references used by the library
        lib._logger = self._logger
        lib._storage = self._storage
Example #20
    async def run(self):
        global _log_performance
        global _LOGGER

        # Sets up signal handlers to properly handle termination
        # a) SIGTERM - 15 : kill or system shutdown
        signal.signal(signal.SIGTERM, SendingProcess._signal_handler)

        # Command line parameter handling
        self._log_performance, self._debug_level = handling_input_parameters()
        _log_performance = self._log_performance

        try:
            self._storage_async = StorageClientAsync(
                self._core_management_host, self._core_management_port)
            self._readings = ReadingsStorageClientAsync(
                self._core_management_host, self._core_management_port)
            self._audit = AuditLogger(self._storage_async)
        except Exception as ex:
            SendingProcess._logger.exception(_MESSAGES_LIST["e000023"].format(
                str(ex)))
            sys.exit(1)
        else:
            SendingProcess._logger.removeHandler(SendingProcess._logger.handle)
            logger_name = _MODULE_NAME + "_" + self._name
            SendingProcess._logger = logger.setup(
                logger_name,
                level=logging.INFO
                if self._debug_level in [None, 0, 1] else logging.DEBUG)
            _LOGGER = SendingProcess._logger

            try:
                is_started = await self._start()
                if is_started:
                    await self.send_data()
                self.stop()
                SendingProcess._logger.info("Execution completed.")
                sys.exit(0)
            except (ValueError, Exception) as ex:
                SendingProcess._logger.exception(
                    _MESSAGES_LIST["e000002"].format(str(ex)))
                sys.exit(1)
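The debug-level-to-logging-level mapping used in run() above also appears, with minor variations, in Examples #1, #11 and #21. A hypothetical helper capturing the convention could look like the following sketch; the function name is an assumption, not part of FogLAMP.

import logging


def debug_level_to_logging_level(debug_level):
    """Hypothetical helper: maps the command-line debug level to a logging level
    following the convention above (None/0/1 -> INFO, 2 or higher -> DEBUG)."""
    if debug_level is None or debug_level <= 1:
        return logging.INFO
    return logging.DEBUG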
Example #21
 def start(self):
     # Command line parameter handling
     try:
         self._mgt_name, self._mgt_port, self._mgt_address, self.input_stream_id, self._log_performance, self._log_debug_level = \
             handling_input_parameters()
     except Exception as ex:
         message = _MESSAGES_LIST["e000017"].format(str(ex))
         SendingProcess._logger.exception(message)
         sys.exit(1)
     try:
         self._storage = StorageClient(self._mgt_address, self._mgt_port)
         self._readings = ReadingsStorageClient(self._mgt_address,
                                                self._mgt_port)
         self._log_storage = LogStorage(self._storage)
     except Exception as ex:
         message = _MESSAGES_LIST["e000023"].format(str(ex))
         SendingProcess._logger.exception(message)
         sys.exit(1)
     else:
         # Reconfigures the logger using the Stream ID to differentiate
         # logging from different processes
         SendingProcess._logger.removeHandler(SendingProcess._logger.handle)
         logger_name = _MODULE_NAME + "_" + str(self.input_stream_id)
         SendingProcess._logger = logger.setup(logger_name)
         try:
             # Set the debug level
             if self._log_debug_level == 1:
                 SendingProcess._logger.setLevel(logging.INFO)
             elif self._log_debug_level >= 2:
                 SendingProcess._logger.setLevel(logging.DEBUG)
             # Start sending
             if self._start(self.input_stream_id):
                 self.send_data(self.input_stream_id)
             # Stop Sending
             self.stop()
             SendingProcess._logger.info(_MESSAGES_LIST["i000002"])
             sys.exit(0)
         except Exception as ex:
             message = _MESSAGES_LIST["e000002"].format(str(ex))
             SendingProcess._logger.exception(message)
             sys.exit(1)
Example #22
async def create_backup(request):
    """
    Creates a backup

    :Example: curl -X POST http://localhost:8082/foglamp/backup
    """
    # TODO : Fix after actual implementation
    Backup.create_backup.return_value = "running"
    status = Backup.create_backup()

    # ##  Test #########################################################################################:

    _logger = logger.setup("BACKUP-API-TEST",
                           destination=logger.SYSLOG,
                           level=logging.DEBUG)

    _logger.info(
        "=== START ==========================================================================================="
    )

    _storage = connect.get_storage()
    backup = backup_postgres.Backup(_storage)

    try:
        await backup.create_backup()
        status = "running"

    except Exception as _ex:
        status = "failed"

    _logger.info(
        "==== END =========================================================================================="
    )
    _logger.handlers = []

    ###  #########################################################################################:

    return web.json_response({"status": status})
Example #23
    def __init__(self):

        super().__init__()

        try:
            # FIXME:
            # self._logger = logger.setup(self._MODULE_NAME)
            self._logger = logger.setup(
                self._MODULE_NAME,
                # destination=logger.CONSOLE,
                level=logging.DEBUG)

        except Exception as _ex:
            _message = self._MESSAGES_LIST["e000001"].format(str(_ex))
            _current_time = time.strftime("%Y-%m-%d %H:%M:%S")

            print("[FOGLAMP] {0} - ERROR - {1}".format(_current_time,
                                                       _message),
                  file=sys.stderr)
            sys.exit(1)

        self._config_from_manager = {}
        self._config = {}
        self._job = lib.Job()
        self._event_loop = asyncio.get_event_loop()

        self._foglamp_backup = ""
        self._foglamp_data = ""
        self._foglamp_data_etc = ""
        self._foglamp_root = ""

        self._backups_dir = ""
        self._semaphores_dir = ""

        # Creates the objects references used by the library
        lib._logger = self._logger
        lib._storage = self._storage
Example #24
    },
    'noOfAssets': {
        'description': 'No. of assets to generate',
        'type': 'integer',
        'default': '2500',
        'order': '2'
    },
    'dataPointsPerSec': {
        'description': 'Data points per second',
        'type': 'integer',
        'default': '5000',
        'order': '3'
    }
}

_LOGGER = logger.setup(__name__, level=logging.INFO)
_task = None
no_of_assets = 0
asset_srl = 0
cn = 0
cn_time = None


def plugin_info():
    """ Returns information about the plugin.
    Args:
    Returns:
        dict: plugin information
    Raises:
    """
    return {
Example #25
import datetime
import time
import uuid
from typing import List, Union
import json
from foglamp.common import logger
from foglamp.common.statistics import Statistics
from foglamp.common.storage_client.storage_client import ReadingsStorageClient, StorageClient
from foglamp.common.storage_client.exceptions import StorageServerError

__author__ = "Terris Linenbach"
__copyright__ = "Copyright (c) 2017 OSIsoft, LLC"
__license__ = "Apache 2.0"
__version__ = "${VERSION}"

_LOGGER = logger.setup(__name__)  # type: logging.Logger
_MAX_ATTEMPTS = 2

# _LOGGER = logger.setup(__name__, level=logging.DEBUG)  # type: logging.Logger
# _LOGGER = logger.setup(__name__, destination=logger.CONSOLE, level=logging.DEBUG)


class Ingest(object):
    """Adds sensor readings to FogLAMP

    Also tracks readings-related statistics.
    Readings are added to a configurable list. Configurable batches of inserts are sent to storage
    """

    # Class attributes
Example #26
from importlib import import_module
import copy
import json

from collections import OrderedDict
from foglamp.common.storage_client.payload_builder import PayloadBuilder
from foglamp.common.storage_client.storage_client import StorageClient

from foglamp.common import logger

__author__ = "Ashwin Gopalakrishnan, Ashish Jabble"
__copyright__ = "Copyright (c) 2017 OSIsoft, LLC"
__license__ = "Apache 2.0"
__version__ = "${VERSION}"

_logger = logger.setup(__name__)

# MAKE UPPER_CASE
_valid_type_strings = [
    'boolean', 'integer', 'string', 'IPv4', 'IPv6', 'X509 certificate',
    'password', 'JSON'
]


class ConfigurationManager(object):
    """ Configuration Manager

    General naming convention:

    category(s)
        category_name - string
Example #27
        'default': 'cc2650poll'
    },
    'pollInterval': {
        'description':
        'The interval between poll calls to the device poll routine expressed in milliseconds.',
        'type': 'integer',
        'default': '500'
    },
    'bluetoothAddress': {
        'description': 'Bluetooth MAC address',
        'type': 'string',
        'default': 'B0:91:22:EA:79:04'
    }
}

_LOGGER = logger.setup(__name__, level=20)

sensortag_characteristics = characteristics


def plugin_info():
    """ Returns information about the plugin.

    Args:
    Returns:
        dict: plugin information
    Raises:
    """

    return {
        'name': 'TI SensorTag CC2650 Poll plugin',
Example #28
    "e000025": "Required argument '--name' is missing - command line |{0}|",
    "e000026": "Required argument '--port' is missing - command line |{0}|",
    "e000027": "Required argument '--address' is missing - command line |{0}|",

}
""" Messages used for Information, Warning and Error notice """

# LOG configuration
_LOG_LEVEL_DEBUG = 10
_LOG_LEVEL_INFO = 20
_LOG_LEVEL_WARNING = 30

_LOGGER_LEVEL = _LOG_LEVEL_WARNING
_LOGGER_DESTINATION = logger.SYSLOG

_LOGGER = logger.setup(__name__, destination=_LOGGER_DESTINATION, level=_LOGGER_LEVEL)

_event_loop = ""
_log_performance = False
""" Enable/Disable performance logging, enabled using a command line parameter"""


class PluginInitialiseFailed(RuntimeError):
    """ PluginInitializeFailed """
    pass


class UnknownDataSource(RuntimeError):
    """ the data source could be only one among: readings, statistics or audit """
    pass
Example #29
    "e000023": "cannot complete the initialization - error details |{0}|",
    "e000024": "unable to log the operation in the Storage Layer - error details |{0}|",
    "e000025": "Required argument '--name' is missing - command line |{0}|",
    "e000026": "Required argument '--port' is missing - command line |{0}|",
    "e000027": "Required argument '--address' is missing - command line |{0}|",
    "e000028": "cannot complete the fetch operation - error details |{0}|",
    "e000029": "an error occurred  during the teardown operation - error details |{0}|",
    "e000030": "unable to create parent configuration category",
    "e000031": "unable to convert in memory data structure related to the readings data "
               "- error details |{0}| - row |{1}|",
    "e000032": "asset code not defined - row |{0}|",

}
""" Messages used for Information, Warning and Error notice """

_LOGGER = logger.setup(__name__)
_event_loop = ""
_log_performance = False
""" Enable/Disable performance logging, enabled using a command line parameter"""


class PluginInitialiseFailed(RuntimeError):
    """ PluginInitializeFailed """
    pass


class UnknownDataSource(RuntimeError):
    """ the data source could be only one among: readings or statistics"""
    pass

Example #30
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

# FOGLAMP_BEGIN
# See: http://foglamp.readthedocs.io/
# FOGLAMP_END

"""Statistics history process starter"""

import asyncio
from foglamp.tasks.statistics.statistics_history import StatisticsHistory
from foglamp.common import logger

__author__ = "Terris Linenbach, Vaibhav Singhal"
__copyright__ = "Copyright (c) 2017 OSIsoft, LLC"
__license__ = "Apache 2.0"
__version__ = "${VERSION}"

if __name__ == '__main__':
    _logger = logger.setup("StatisticsHistory")
    statistics_history_process = StatisticsHistory()
    loop = asyncio.get_event_loop()
    loop.run_until_complete(statistics_history_process.run())