def main():
    """Main function for SPEAD sender module."""
    # Check command line arguments.
    if len(sys.argv) != 2:
        raise RuntimeError('Usage: python3 async_send.py <json config>')

    # Set up logging.
    sip_logging.init_logger(show_thread=False)

    # Load the SPEAD configuration from the JSON string passed on the
    # command line.
    spead_config = json.loads(sys.argv[1])
    try:
        _path = os.path.dirname(os.path.abspath(__file__))
        schema_path = os.path.join(_path, 'config_schema.json')
        with open(schema_path) as schema_file:
            schema = json.load(schema_file)
        validate(spead_config, schema)
    except ValidationError as error:
        print(error.message)
        raise

    # Set up the SPEAD sender and run it (see method, above).
    sender = SpeadSender(spead_config)
    sender.run()
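

# --- Illustrative sketch (not part of the module above) ---
# The validation step in main() relies on the jsonschema package. Below is
# a minimal, hedged example of the same pattern with a made-up schema; the
# real rules live in the config_schema.json file loaded above.
from jsonschema import ValidationError, validate

_EXAMPLE_SCHEMA = {
    'type': 'object',
    'properties': {
        'destination_host': {'type': 'string'},
        'heap_size': {'type': 'integer', 'minimum': 1},
    },
    'required': ['destination_host'],
}

try:
    validate({'destination_host': 'localhost', 'heap_size': 4096},
             _EXAMPLE_SCHEMA)
except ValidationError as error:
    print(error.message)
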
Example 2
def execute_processing_block(pb_id: str, log_level='DEBUG'):
    """Execute a processing block.

    Celery task that executes a workflow defined in a Configuration
    database Processing Block data object.

    Args:
        pb_id (str): Processing Block (PB) id for the Processing Block
            Controller (PBC).
        log_level (str): Python logging level.

    """
    init_logger('sip',
                show_log_origin=True,
                propagate=False,
                log_level=log_level)
    LOG.info('+' * 40)
    LOG.info('+ Executing Processing block: %s!', pb_id)
    LOG.info('+' * 40)
    LOG.info('Processing Block Controller version: %s', __version__)
    LOG.info('Docker Swarm API version: %s', sip_swarm_api_version)
    LOG.info('Configuration database API version: %s', config_db_version)

    pb = ProcessingBlock(pb_id)

    LOG.info('Starting workflow %s %s', pb.workflow_id, pb.workflow_version)

    pb.set_status('running')
    docker = DockerSwarmClient()

    # Copy the workflow stages into a dict.
    workflow_stage_dict = {}
    for stage in pb.workflow_stages:
        workflow_stage_dict[stage.id] = deepcopy(stage.config)
        workflow_stage_dict[stage.id]['services'] = dict()

    # Loop until workflow stages are complete.
    while True:
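        # Throttle the polling loop (0.1 s between iterations).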
        time.sleep(0.1)

        for workflow_stage in pb.workflow_stages:

            _start_workflow_stages(pb, pb_id, workflow_stage_dict,
                                   workflow_stage, docker)

            _update_workflow_stages(workflow_stage_dict[workflow_stage.id],
                                    workflow_stage, docker)

        if _abort_workflow(pb, workflow_stage_dict, docker):
            break

        if _workflow_complete(workflow_stage_dict):
            break

    pb_list = ProcessingBlockList()
    pb_list.set_complete(pb_id)
    pb.set_status('completed')
    LOG.info('-' * 40)
    LOG.info('- Destroying PBC for %s', pb_id)
    LOG.info('-' * 40)
    return pb.status
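

# --- Illustrative sketch (not part of the module above) ---
# The helper functions polled in the loop (_start_workflow_stages,
# _update_workflow_stages, _abort_workflow, _workflow_complete) are not
# shown in this example. Purely as a hypothetical illustration, a
# completion check over the stage dict could look like the function
# below; the 'status' field it inspects is an assumption, not the real
# data model.
def _all_stages_complete_example(workflow_stage_dict):
    """Return True if every workflow stage reports a finished status."""
    return all(stage.get('status') in ('complete', 'finished')
               for stage in workflow_stage_dict.values())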
Example 5
def main():
    """Main function for SPEAD receiver module."""
    # Check command line arguments.
    if len(sys.argv) < 2:
        raise RuntimeError('Usage: python3 async_recv.py <json config>')

    # Set up logging.
    sip_logging.init_logger(show_thread=True)

    # Load the SPEAD configuration from the JSON string passed on the
    # command line.
    spead_config = json.loads(sys.argv[1])

    # Set up the SPEAD receiver and run it (see method, above).
    receiver = SpeadReceiver(spead_config)
    receiver.run()
def _parse_args():
    """Command line parser."""
    parser = argparse.ArgumentParser(
        description='{} service.'.format(__service_id__))
    parser.add_argument('--random_errors', action='store_true',
                        help='Enable random errors')
    parser.add_argument('-v', action='store_true',
                        help='Verbose mode (enable debug printing)')
    parser.add_argument('-vv', action='store_true', help='Extra verbose mode')
    args = parser.parse_args()

    if args.vv:
        init_logger(log_level='DEBUG', show_log_origin=True)
    elif args.v:
        init_logger(logger_name='sip.ec.master_controller', log_level='DEBUG')
    else:
        init_logger(log_level='INFO')

    return args
# coding=utf-8
"""Module to get tango device variables."""
import logging
from sip_logging import init_logger
from tango import DeviceProxy

init_logger()
log = logging.getLogger('sip.itango_utils.get_tango_devices')

device_name = 'sip_sdp/elt/master'
log.info('* Getting Tango Master Device (%s) as: \'md\'', device_name)
md = DeviceProxy(device_name)
log.info('  - Tango Master Device version: %s, status: %s', md.version,
         md.status())
log.info('* Tango Master Device attributes:')
for attr in md.get_attribute_list():
    log.info('  - %s', attr)
log.info('* Tango Master Device commands:')
for cmd in md.get_command_list():
    log.info('  - %s', cmd)


sub = []
for index in range(2):
    device_name = 'sip_sdp/elt/subarray_{:02d}'.format(index)
    log.info('* Getting Subarray %02d Device (%s) as: \'sub[%02d]\'',
             index, device_name, index)
    sub.append(DeviceProxy(device_name))
    # log.info('  - Subarray %02d Device version: %s, status: %s',
    #          index, sub[index].version, sub[index].status())
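

# --- Illustrative extension (not in the original module) ---
# A hedged sketch: also log the value of each Master Device attribute,
# using the standard DeviceProxy.read_attribute() call. Some attributes
# may not be readable, so failures are logged rather than raised.
log.info('* Tango Master Device attribute values:')
for attr in md.get_attribute_list():
    try:
        log.info('  - %s = %s', attr, md.read_attribute(attr).value)
    except Exception as read_error:
        log.info('  - %s (unreadable: %s)', attr, read_error)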
Example 9
def register_master():
    """Register the SDP Master device."""
    tango_db = Database()
    device = "sip_sdp/elt/master"
    device_info = DbDevInfo()
    device_info._class = "SDPMasterDevice"
    device_info.server = "sdp_master_ds/1"
    device_info.name = device
    devices = tango_db.get_device_name(device_info.server, device_info._class)
    if device not in devices:
        LOG.info('Registering device "%s" with device server "%s"',
                 device_info.name, device_info.server)
        tango_db.add_device(device_info)


def main(args=None, **kwargs):
    """Run the Tango SDP Master device server."""
    LOG.info('Starting %s', __service_id__)
    return run([SDPMasterDevice],
               verbose=True,
               msg_stream=sys.stdout,
               args=args,
               **kwargs)


if __name__ == '__main__':
    init_logger(logger_name='', show_log_origin=True)
    init_logger(show_log_origin=True)
    register_master()
    main()
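

# --- Illustrative sketch (not part of the module above) ---
# A hedged way to confirm the registration once the device server is
# running: ping the device from any Tango client. This assumes a running
# Tango facility and is not part of the original service code.
from tango import DeviceProxy

proxy = DeviceProxy('sip_sdp/elt/master')
print('Master device ping: %d us' % proxy.ping())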


def main(sleep_length):
    """Log a message every sleep_length seconds until interrupted."""
    log = logging.getLogger('sip.examples.log_spammer')

    log.info('Starting to spam log messages every %fs', sleep_length)
    counter = 0
    try:
        while True:
            log.info('Hello %06i (log_spammer: %s, sip logging: %s)',
                     counter, _version.__version__, __version__)
            counter += 1
            time.sleep(sleep_length)
    except KeyboardInterrupt:
        log.info('Exiting...')


if __name__ == '__main__':
    PARSER = argparse.ArgumentParser(description='Spam stdout with Python '
                                                 'logging.')
    PARSER.add_argument('sleep_length', type=float,
                        help='number of seconds to sleep between messages.')
    PARSER.add_argument('--timestamp-us', required=False, action='store_true',
                        help='Use microsecond timestamps.')
    PARSER.add_argument('--show-thread', required=False, action='store_true',
                        help='Show the thread in the logging output.')
    args = PARSER.parse_args()

    P3_MODE = not args.timestamp_us
    show_thread = args.show_thread
    init_logger(p3_mode=P3_MODE, show_thread=show_thread)

    main(args.sleep_length)
Example 13
from random import randrange
import time
from http import HTTPStatus
import jsonschema

from flask import request
from flask_api import FlaskAPI

from sip_config_db.scheduling import ProcessingBlockList, \
    SchedulingBlockInstanceList
from sip_config_db.states import SDPState
from sip_config_db.scheduling import Subarray, SubarrayList
from sip_logging import init_logger
from .release import LOG, __service_id__, __version__

init_logger('flask.logging.wsgi_errors_stream')
init_logger()

APP = FlaskAPI(__name__)

START_TIME = time.time()


def _check_status(sdp_state):
    """SDP Status check.

    If the SDP state is "broken", run the checks needed to determine the
    likely cause, and return a suitable status message to be sent back
    by the calling function.
    """
Example 14
# -*- coding: utf-8 -*-
"""Processing Controller Scheduler application main."""
from sip_logging import init_logger
from sip_logging.sip_logging import disable_logger
from .log import LOG
from .release import __service_name__
from .scheduler import ProcessingBlockScheduler

if __name__ == '__main__':
    init_logger(show_log_origin=False, show_thread=True, p3_mode=False)
    disable_logger('sip.ec.config_db')
    LOG.info('Starting %s', __service_name__)
    ProcessingBlockScheduler().start()