Example #1
    def __init__(
        self,
        base_config,
        is_passive,
        interrupt_sig=None,
        *args,
        **kwargs,
    ):
        self.class_name = self.__class__.__name__
        super().__init__(service_name=self.class_name)

        self.log = LogParser(base_config=base_config, title=__name__)

        self.base_config = base_config

        self.is_passive = is_passive
        if self.is_passive:
            self.base_config.clock_sim = self

        self.redis = RedisManager(name=self.class_name,
                                  base_config=base_config,
                                  log=self.log)

        self.interrupt_sig = interrupt_sig
        if self.interrupt_sig is None:
            self.interrupt_sig = multiprocessing.Event()

        if not self.is_passive:
            with ClockSim.lock:
                self.setup_active_instance()

        return
Example #2
    async def setup(self, *args):
        self.setup_args = args

        widget_info = await self.sm.get_lazy_widget_info(
            sess_id=self.sess_id,
            widget_id=self.widget_id,
        )
        if widget_info is None:
            return

        if self.n_icon is None:
            self.n_icon = widget_info['n_icon']
            # self.icon_id = widget_info['icon_id']

        # override the global logging variable with a
        # name corresponding to the current session id
        self.log = LogParser(
            base_config=self.base_config,
            title=(
                str(self.user_id) + '/' + str(self.sm.sess_id) + '/' + __name__ + '/'
                + self.widget_id
            ),
        )

        return
Example #3
    def __init__(self, base_config, service_name, interrupt_sig):
        self.class_name = self.__class__.__name__
        service_name = (service_name
                        if service_name is not None else self.class_name)
        super().__init__(service_name=service_name)

        self.log = LogParser(base_config=base_config, title=__name__)
        # self.log.info([['y', ' - InstHealth - ']])

        self.base_config = base_config
        self.site_type = self.base_config.site_type
        self.clock_sim = self.base_config.clock_sim
        self.inst_data = self.base_config.inst_data

        self.service_name = service_name
        self.interrupt_sig = interrupt_sig

        self.redis = RedisManager(name=self.class_name,
                                  base_config=self.base_config,
                                  log=self.log)

        self.tel_ids = self.inst_data.get_inst_ids()

        self.inst_health_s0 = dict()
        self.inst_health_s1 = dict()
        self.inst_health = dict()
        self.inst_health_sub_flat = dict()

        # minimum interval of simulation-time to wait before randomising values
        min_wait_update_sec = 10
        self.check_update_opts = {
            'prev_update': None,
            'min_wait': min_wait_update_sec,
        }

        # the fraction of telescopes to randomly update
        self.update_frac = 0.05

        # set debug_updates to 0 to have mild updates, to 1 to have frequent
        # updates for a single inst, or to 2 to frequently update all instruments
        self.debug_updates = 1

        self.inst_data = self.base_config.inst_data
        self.health_tag = self.inst_data.health_tag

        # sleep duration for thread loops
        self.loop_sleep_sec = 1
        # throttle randomisations to once every self.loop_act_rate loop iterations
        self.loop_act_rate = max(int(5 / self.loop_sleep_sec), 1)

        self.init()

        # make sure this is the only active instance
        self.init_active_instance()

        self.setup_threads()

        return
Example #4
    def __init__(self, base_config, *args, **kwargs):
        self.log = LogParser(base_config=base_config, title=__name__)
        self.log.info([['y', " - ViewManager - "],
                       ['g', base_config.site_type]])

        self.base_config = base_config
        self.app_prefix = base_config.app_prefix
        self.site_type = base_config.site_type
        self.websocket_route = base_config.websocket_route

        return
Example #5
    def __init__(self):
        self.class_name = self.__class__.__name__

        self.app_name = 'data_manager'
        settings = parse_args(app_name=self.app_name)

        # southern or northern CTA sites have different telescope configurations
        site_type = settings['site_type']
        # the address for the site
        app_host = settings['app_host']
        # local log file location
        log_file = settings['log_file']
        # logging level
        log_level = settings['log_level']
        # the port for the site
        app_port = settings['app_port']
        # the redis port used for this site
        self.redis_port = settings['redis_port']
        # define the prefix for all urls (must be a non-empty string)
        app_prefix = settings['app_prefix']
        # is this a simulation
        is_simulation = settings['is_simulation']
        # development mode
        debug_opts = settings['debug_opts']
        # do we flush redis on startup
        self.do_flush_redis = settings['do_flush_redis']

        # instantiate the general settings class (must come first!)
        self.base_config = BaseConfig(
            site_type=site_type,
            redis_port=self.redis_port,
            app_port=app_port,
            app_prefix=app_prefix,
            app_host=app_host,
            log_level=log_level,
            websocket_route=None,
            allow_panel_sync=None,
            debug_opts=debug_opts,
            is_simulation=is_simulation,
        )

        self.log = LogParser(
            base_config=self.base_config,
            title=__name__,
            log_level=log_level,
            log_file=log_file,
        )

        self.redis = RedisManager(
            name=self.class_name, base_config=self.base_config, log=self.log
        )

        return
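The constructor above only consumes a fixed set of keys from the dict returned by parse_args. As a reference, here is a minimal sketch of that dict's assumed shape; the keys are taken from the code above, while all values are illustrative placeholders rather than real defaults:

# hypothetical settings dict, mirroring the keys read in __init__ above
settings = {
    'site_type': 'N',                      # southern or northern site
    'app_host': '127.0.0.1',
    'app_port': 8080,
    'redis_port': 6379,
    'app_prefix': 'ctl',                   # must be a non-empty string
    'log_file': '/tmp/data_manager.log',
    'log_level': 'INFO',
    'is_simulation': True,
    'debug_opts': {'dev': True},           # shape assumed from the other examples
    'do_flush_redis': False,
}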
Example #6
    def __init__(self,
                 base_config,
                 service_name,
                 interrupt_sig,
                 end_time_sec=None,
                 timescale=None,
                 *args,
                 **kwargs):
        self.class_name = self.__class__.__name__
        service_name = (service_name
                        if service_name is not None else self.class_name)
        super().__init__(service_name=service_name)

        self.log = LogParser(base_config=base_config, title=__name__)

        self.base_config = base_config

        self.service_name = service_name
        self.interrupt_sig = interrupt_sig

        # 28800 -> 8 hour night
        self.end_time_sec = 28800 if end_time_sec is None else end_time_sec
        # 0.035 -> have 30 minutes last for one minute in real time
        self.timescale = 0.07 if timescale is None else timescale
        # 0.0035 -> have 30 minutes last for 6 sec in real time
        # if not has_acs:
        #   self.timescale /= 2
        # self.timescale /= 20

        self.redis = RedisManager(name=self.class_name,
                                  base_config=base_config,
                                  log=self.log)

        self.n_night = -1

        # sleep duration for thread loops
        self.loop_sleep_sec = 1

        # range in seconds of time-series data to be stored for eg monitoring points
        self.epoch = datetime.utcfromtimestamp(0)
        self.time_series_n_seconds = 60 * 30
        self.second_scale = 1000

        self.reset_night()

        # make sure this is the only active instance
        self.init_active_instance()

        self.setup_threads()

        return
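The two parameters above fix the real-time length of a simulated night. Assuming, as the inline comments suggest, that timescale maps simulated seconds to real seconds, a quick sanity check of the numbers:

# 30 simulated minutes = 1800 s
# timescale 0.035  -> 1800 * 0.035  = 63 s   (about one real minute, as commented)
# timescale 0.0035 -> 1800 * 0.0035 = 6.3 s  (about six real seconds, as commented)
end_time_sec = 28800                       # 8 hour night
timescale = 0.07
real_night_sec = end_time_sec * timescale  # 2016 s, i.e. roughly 34 real minutes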
Example #7
    def __init__(self, base_config, service_name, interrupt_sig):
        self.class_name = self.__class__.__name__
        service_name = (service_name
                        if service_name is not None else self.class_name)
        super().__init__(service_name=service_name)

        self.log = LogParser(base_config=base_config, title=__name__)

        self.base_config = base_config
        self.site_type = self.base_config.site_type
        self.clock_sim = self.base_config.clock_sim
        self.inst_data = self.base_config.inst_data

        self.service_name = service_name
        self.interrupt_sig = interrupt_sig

        self.tel_ids = self.inst_data.get_inst_ids()

        self.inst_pos_0 = self.base_config.inst_pos_0

        self.redis = RedisManager(name=self.class_name,
                                  base_config=self.base_config,
                                  log=self.log)

        # ------------------------------------------------------------------
        rnd_seed = 10989152934
        self.rnd_gen = Random(rnd_seed)

        # minimum interval of simulation-time to wait before randomising values
        min_wait_update_sec = 10
        self.check_update_opts = {
            'prev_update': None,
            'min_wait': min_wait_update_sec,
        }

        # sleep duration for thread loops
        self.loop_sleep_sec = 1
        # throttle randomisations to once every self.loop_act_rate loop iterations
        self.loop_act_rate = max(int(5 / self.loop_sleep_sec), 1)

        self.init()

        # make sure this is the only active instance
        self.init_active_instance()

        self.setup_threads()

        return
Example #8
    def __init__(self, reload_dirs, app_name, services):
        self.reload_dirs = reload_dirs
        self.app_name = app_name
        self.services = services

        settings = parse_args(app_name=self.app_name)

        # top level module names for the project
        self.module_names = settings['module_names']
        # local log file location
        log_file = settings['log_file']
        # logging level
        log_level = settings['log_level']
        # development mode
        self.reload_services = settings['reload']

        # time to wait between file changes (if multiple files change at the
        # same time, this avoids multiple reloading)
        self.time_wait_sec = 3

        self.log = LogParser(
            base_config=None,
            title=__name__,
            log_level=log_level,
            log_file=log_file,
        )
        self.log.info([['c', ' - has_acs = '],
                       [('g' if has_acs else 'r'), has_acs]])

        settings_log = [['g', ' - server settings:\n']]
        for k, v in settings.items():
            settings_log += [['b', str(k)], [': ']]
            settings_log += [['c', str(v)], [',  ']]
        self.log.info(settings_log)

        # add all python files from the current directory to the list
        # of reloadable modules
        self.module_names = copy.deepcopy(self.module_names)
        for (root, d_names, f_names) in os.walk(os.getcwd()):
            self.module_names += [
                f.replace('.py', '') for f in f_names if f.endswith('.py')
            ]

        return
Example #9
    def __init__(self, widget_id=None, sm=None, *args, **kwargs):
        self.log = LogParser(base_config=sm.base_config, title=__name__)

        # the parent of this widget
        self.sm = sm
        # the shared basic configuration class
        self.base_config = self.sm.base_config
        self.sess_id = self.sm.sess_id
        self.user_id = self.sm.user_id

        # the id of this instance
        self.widget_id = widget_id
        # widget-class and widget group names
        self.widget_type = self.__class__.__name__
        # for common threading
        self.widget_group = self.sm.user_group_id + '_' + self.widget_type

        # redis interface
        self.redis = RedisManager(
            name=self.widget_type, base_config=self.base_config, log=self.log
        )

        # turn on periodic data updates
        self.do_data_updates = True
        # some extra logging messages for this module
        self.log_send_packet = False

        # fixed or dynamic icon
        self.n_icon = None
        # self.n_icon = -1
        # self.icon_id = -1

        # list of utility classes to loop over
        self.my_utils = dict()

        # arguments given to the setup function, to later be
        # passed to utils if needed
        self.setup_args = None

        return
Example #10
    def __init__(self, base_config):
        self.log = LogParser(base_config=base_config, title=__name__)
        self.log.info([['g', ' - starting MockTarget ...']])

        self.base_config = base_config
        self.site_type = self.base_config.site_type

        self.class_name = self.__class__.__name__
        self.redis = RedisManager(
            name=self.class_name, base_config=self.base_config, log=self.log
        )

        # ------------------------------------------------------------------
        rnd_seed = 10989152934
        # self.rnd_gen = Random(rnd_seed)
        self.rnd_gen = Random()

        self.az_min_max = [-180, 180]
        self.zen_min_max_tel = [0, 70]

        self.init()

        return
Example #11
    def __init__(self, util_id=None, parent=None, *args, **kwargs):
        self.class_name = self.__class__.__name__

        self.log = LogParser(base_config=parent.base_config, title=__name__)

        # the parent of this widget
        self.parent = parent
        # the id of this instance
        self.util_id = util_id

        # the shared basic configuration class
        self.base_config = self.parent.base_config
        # the id of this instance
        self.widget_id = parent.widget_id
        # the parent of this widget
        self.sm = parent.sm
        # widget-class and widget group names
        self.widget_type = parent.widget_type
        # redis interface
        self.redis = parent.redis
        # turn on periodic data updates
        self.do_data_updates = parent.do_data_updates
        # some extra logging messages for this module
        self.log_send_packet = parent.log_send_packet
        # locker
        self.locker = self.sm.locker

        # validate that all required properties have been defined
        check_init_properties = [
            'widget_id',
            'sm',
            'widget_type',
            'redis',
            'do_data_updates',
            'log_send_packet',
            'n_icon',
            # 'icon_id',
        ]

        for init_property in check_init_properties:
            if not hasattr(parent, init_property):
                raise Exception(
                    ' - bad initialisation of ArrZoomerUtil()... - missing property: ',
                    init_property,
                )

        return
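The loop above only verifies that the parent exposes each attribute listed in check_init_properties. Below is a minimal sketch of a parent stub that would pass this validation; the class and all values here are illustrative, not taken from the source:

# hypothetical parent widget stub satisfying the attribute check above
class ParentWidgetStub:
    def __init__(self, sm, base_config):
        self.sm = sm                      # session manager, provides .locker
        self.base_config = base_config
        self.widget_id = 'widget_0'
        self.widget_type = self.__class__.__name__
        self.redis = None                 # a RedisManager instance in a real widget
        self.do_data_updates = True
        self.log_send_packet = False
        self.n_icon = None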
Example #12
class InstPos(ServiceManager):
    """telescope pointing-position simulation class, simulating changes of pointing

       Only a single active instance is allowed to exist
    """

    lock = Lock()

    # ------------------------------------------------------------------
    def __init__(self, base_config, service_name, interrupt_sig):
        self.class_name = self.__class__.__name__
        service_name = (service_name
                        if service_name is not None else self.class_name)
        super().__init__(service_name=service_name)

        self.log = LogParser(base_config=base_config, title=__name__)

        self.base_config = base_config
        self.site_type = self.base_config.site_type
        self.clock_sim = self.base_config.clock_sim
        self.inst_data = self.base_config.inst_data

        self.service_name = service_name
        self.interrupt_sig = interrupt_sig

        self.tel_ids = self.inst_data.get_inst_ids()

        self.inst_pos_0 = self.base_config.inst_pos_0

        self.redis = RedisManager(name=self.class_name,
                                  base_config=self.base_config,
                                  log=self.log)

        # ------------------------------------------------------------------
        rnd_seed = 10989152934
        self.rnd_gen = Random(rnd_seed)

        # minimum interval of simulation-time to wait before randomising values
        min_wait_update_sec = 10
        self.check_update_opts = {
            'prev_update': None,
            'min_wait': min_wait_update_sec,
        }

        # sleep duration for thread loops
        self.loop_sleep_sec = 1
        # throttle randomisations to once every self.loop_act_rate loop iterations
        self.loop_act_rate = max(int(5 / self.loop_sleep_sec), 1)

        self.init()

        # make sure this is the only active instance
        self.init_active_instance()

        self.setup_threads()

        return

    # ------------------------------------------------------------------
    def setup_threads(self):

        self.add_thread(target=self.loop_main)

        return

    # ------------------------------------------------------------------
    def init(self):
        # self.log.info([['g', ' - InstPos.init() ...']])

        with InstPos.lock:
            self.update_inst_pos()

        return

    # ------------------------------------------------------------------
    def update_inst_pos(self):
        min_delta_pos_sqr = pow(0.05, 2)
        frac_delta_pos = 0.25

        inst_pos_in = dict()
        if self.redis.exists('inst_pos'):
            inst_pos_in = self.redis.h_get_all(name='inst_pos')

        obs_block_ids = self.redis.get(name=('obs_block_ids_' + 'run'),
                                       default_val=[])

        pipe = self.redis.get_pipe()
        for obs_block_id in obs_block_ids:
            pipe.get(obs_block_id)
        blocks = pipe.execute()

        tel_point_pos = dict()
        for n_block in range(len(blocks)):
            if not isinstance(blocks[n_block], dict):
                continue
            if len(blocks[n_block]['pointings']) == 0:
                continue
            tel_ids = (blocks[n_block]['telescopes']['large']['ids'] +
                       blocks[n_block]['telescopes']['medium']['ids'] +
                       blocks[n_block]['telescopes']['small']['ids'])
            point_pos = blocks[n_block]['pointings'][0]['pos']

            for id_now in tel_ids:
                tel_point_pos[id_now] = point_pos

        for id_now in self.tel_ids:
            inst_pos_now = inst_pos_in.get(id_now, self.inst_pos_0)
            if inst_pos_now is None:
                inst_pos_now = self.inst_pos_0
            inst_pos_new = inst_pos_now

            if id_now in tel_point_pos:
                point_pos = tel_point_pos[id_now]

                pos_dif = [(point_pos[0] - inst_pos_now[0]),
                           (point_pos[1] - inst_pos_now[1])]
                if (pos_dif[0] > 360):
                    pos_dif[0] -= 360
                elif (pos_dif[0] < -360):
                    pos_dif[0] += 360
                # if(pos_dif[0] > 180):
                #   pos_dif[0] -= 360
                # elif(pos_dif[0] < -180):
                #   pos_dif[0] += 360
                if (pos_dif[1] >= 90):
                    pos_dif[1] -= 90

                rnd_scale = 1
                if (pos_dif[0] * pos_dif[0] +
                        pos_dif[1] * pos_dif[1]) < min_delta_pos_sqr:
                    rnd_scale = -1.5 if (self.rnd_gen.random() < 0.5) else 1.5

                inst_pos_new = [
                    inst_pos_now[0] + pos_dif[0] * rnd_scale *
                    self.rnd_gen.random() * frac_delta_pos,
                    inst_pos_now[1] + pos_dif[1] * rnd_scale *
                    self.rnd_gen.random() * frac_delta_pos
                ]

            pipe.h_set(name='inst_pos', key=id_now, data=inst_pos_new)

        pipe.execute()

        return

    # ------------------------------------------------------------------
    def loop_main(self):
        self.log.info([['g', ' - starting InstPos.loop_main ...']])
        sleep(0.1)

        n_loop = 0
        while self.can_loop():
            n_loop += 1
            sleep(self.loop_sleep_sec)
            if n_loop % self.loop_act_rate != 0:
                continue

            need_update = self.clock_sim.need_data_update(
                update_opts=self.check_update_opts, )
            if not need_update:
                continue

            with InstPos.lock:
                self.update_inst_pos()

        self.log.info([['c', ' - ending InstPos.loop_main ...']])

        return
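A minimal launch sketch for the service above, assuming (as in the other examples) a fully initialised BaseConfig and a multiprocessing.Event used as the interrupt signal; base_config and the import of InstPos are assumptions, since the module path is not shown in the excerpt:

import multiprocessing

# hypothetical launch of the pointing-position simulation service
interrupt_sig = multiprocessing.Event()
inst_pos = InstPos(
    base_config=base_config,   # assumed to be an initialised BaseConfig instance
    service_name=None,         # None falls back to the class name
    interrupt_sig=interrupt_sig,
)

# stopping the thread loops later presumably goes through the interrupt signal
interrupt_sig.set()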
Example #13
class SetupServer():
    """class for running / reloading services
    """
    def __init__(self, reload_dirs, app_name, services):
        self.reload_dirs = reload_dirs
        self.app_name = app_name
        self.services = services

        settings = parse_args(app_name=self.app_name)

        # top level module names for the project
        self.module_names = settings['module_names']
        # local log file location
        log_file = settings['log_file']
        # logging level
        log_level = settings['log_level']
        # development mode
        self.reload_services = settings['reload']

        # time to wait between file changes (if multiple files change at the
        # same time, this avoids multiple reloading)
        self.time_wait_sec = 3

        self.log = LogParser(
            base_config=None,
            title=__name__,
            log_level=log_level,
            log_file=log_file,
        )
        self.log.info([['c', ' - has_acs = '],
                       [('g' if has_acs else 'r'), has_acs]])

        settings_log = [['g', ' - server settings:\n']]
        for k, v in settings.items():
            settings_log += [['b', str(k)], [': ']]
            settings_log += [['c', str(v)], [',  ']]
        self.log.info(settings_log)

        # add all python files from the current directory to the list
        # of reloadable modules
        self.module_names = copy.deepcopy(self.module_names)
        for (root, d_names, f_names) in os.walk(os.getcwd()):
            self.module_names += [
                f.replace('.py', '') for f in f_names if f.endswith('.py')
            ]

        return

    # ------------------------------------------------------------------
    def deep_module_reload(self, is_verb):
        """collect all modules from the project and reload them (used for development purposes)
        """

        # sleep to encourage unique random seeds for BaseConfig (which are
        # based on the current time in msec)
        sleep(1e-3)

        # add all relevant modules to populate the sys.modules list
        importlib.import_module('manager')

        # get all loaded modules
        mods = tuple(sys.modules)

        # filter modules from the project
        mods = [
            mod for mod in mods if any([n in mod for n in self.module_names])
        ]

        # just in case, make sure we have no duplicate entries
        mods = list(dict.fromkeys(mods))

        # sort, beginning with ordered_items, then the most deeply nested
        # make sure to start with those modules which others depend on
        # such as BaseConfig (whose class attributes are used e.g. by utils)
        ordered_items = ['shared.BaseConfig', 'shared.LogParser', 'InstPos']

        def sort_key(item):
            if item in ordered_items:
                order = ordered_items.index(item)
            else:
                order = len(ordered_items) + 1 / (1 + item.count('.'))
            return order

        mods = sorted(mods, key=sort_key, reverse=False)
        # mods = sorted(mods, key=lambda item: item.count('.'), reverse=True)

        if is_verb:
            self.log.info([['wg', ' - reloading modules:'],
                           ['c', ' ', ', '.join(mods)]])

        # reload modules
        for mod in mods:
            # print(sys.modules[mod].__name__)
            importlib.reload(sys.modules[mod])

        return

    # ------------------------------------------------------------------
    def get_manager(self):
        """reload the manager class
        """

        self.deep_module_reload(is_verb=False)

        import manager as manager
        manager = manager.Manager()

        return manager

    # ------------------------------------------------------------------
    def run_service(self, service_name, interrupt_sig):
        """reload and run the manager class for a given service
        """

        try:
            manager = self.get_manager()
            manager.run_service(service_name=service_name,
                                interrupt_sig=interrupt_sig)

        except KeyboardInterrupt:
            interrupt_sig.set()
            pass

        except Exception as e:
            self.log.info([['wr', e]])
            traceback.print_tb(e.__traceback__)
            raise e

        return

    # ------------------------------------------------------------------
    def run_server(self):
        """run the services, watching for file changes (for development) in order to reload
        """
        def spawn_procs():
            """create processes for each service
            """

            # short wait to allow possible previous iterations to clear
            sleep(1)

            self.deep_module_reload(is_verb=True)

            self.multi_procs = []
            self.interrupt_sig = multiprocessing.Event()

            for n_service in range(len(self.services)):
                service_name = self.services[n_service]['name']
                is_blocking = self.services[n_service]['is_blocking']
                multi_proc = multiprocessing.Process(
                    target=self.run_service,
                    kwargs={
                        'interrupt_sig': self.interrupt_sig,
                        'service_name': service_name,
                    },
                )
                multi_proc.start()

                if is_blocking:
                    # blocking services will be run until they finish
                    multi_proc.join()
                else:
                    # non-blocking services will run asynchronously
                    self.multi_procs += [multi_proc]

            return

        def clear_procs():
            """cleanup processes for all services
            """

            # upon changes, send the interrupt signal to all asynchronous services
            self.interrupt_sig.set()

            # wait for all asynchronous services to finish
            for multi_proc in self.multi_procs:
                multi_proc.join()

            return

        class FileChangeHandler(PatternMatchingEventHandler):
            def on_any_event(self, event):
                """overloaded function from PatternMatchingEventHandler, called
                   on any watched file change
                """

                # print(f'event type: {event.event_type}  path : {event.src_path}')

                if not self._need_reload():
                    return
                self.set_reload_time_sec()

                clear_procs()
                if self._reload_services:
                    spawn_procs()
                else:
                    self.set_can_keep_reloading(False)
                return

            def set_reload_time_wait_sec(self, time_wait_sec):
                self._reload_time_wait_sec = time_wait_sec
                return

            def set_reload_time_sec(self, reload_time_sec=None):
                self._reload_time_sec = (reload_time_sec if reload_time_sec
                                         is not None else get_time('sec'))
                return

            def _need_reload(self):
                return (get_time('sec') - self._reload_time_sec >
                        self._reload_time_wait_sec)

            def set_reload_services(self, reload_services):
                self._reload_services = reload_services
                return

            def set_can_keep_reloading(self, can_reload):
                self._can_reload = can_reload
                return

            def get_can_keep_reloading(self):
                return self._can_reload

        # watch all python files, ignoring the current one
        event_handler = FileChangeHandler(
            patterns=['*.py'],
            ignore_patterns=['*/server_setup.py'],
        )
        event_handler.set_reload_time_sec(reload_time_sec=0)
        event_handler.set_can_keep_reloading(True)
        event_handler.set_reload_services(self.reload_services)
        event_handler.set_reload_time_wait_sec(self.time_wait_sec)

        def add_file_observers(observers):
            for dir_name in self.reload_dirs:
                observer = Observer()
                observer.schedule(event_handler, path=dir_name, recursive=True)
                observer.start()

                observers += [observer]
            return

        def final_cleanup(observers):
            self.log.info([['o', ' ' + ('=' * 55)]])
            self.log.info([['o', ' - Done !', ('=' * 50)]])
            self.log.info([['o', ' ' + ('=' * 75)]])

            for observer in observers:
                observer.stop()
            for observer in observers:
                observer.join()

            clear_procs()

        try:
            spawn_procs()

            observers = []
            add_file_observers(observers)

            while event_handler.get_can_keep_reloading():
                sleep(1)

            final_cleanup(observers)

        except KeyboardInterrupt:
            final_cleanup(observers)

        except Exception as e:
            self.log.info([['wr', e]])
            traceback.print_tb(e.__traceback__)
            raise e

        return
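A minimal driver sketch for the reload server above. The shape of each services entry ('name' plus 'is_blocking') is taken from spawn_procs(); the concrete service names, directories, and the import of SetupServer itself are illustrative assumptions:

import os

# hypothetical entry point wiring up SetupServer
if __name__ == '__main__':
    services = [
        {'name': 'clock_sim', 'is_blocking': False},
        {'name': 'inst_pos', 'is_blocking': False},
    ]
    server = SetupServer(
        reload_dirs=[os.getcwd()],
        app_name='data_manager',
        services=services,
    )
    server.run_server()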
Example #14
class ClockSim(ServiceManager):
    """clock simulation class, simulating the procession of a night

       Only a single active instance is allowed to exist. Multiple passive instances
       are allowed. A passive instance only serves as an interface for the clock via redis
    """

    lock = Lock()

    # ------------------------------------------------------------------
    def __init__(
        self,
        base_config,
        is_passive,
        interrupt_sig=None,
        *args,
        **kwargs,
    ):
        self.class_name = self.__class__.__name__
        super().__init__(service_name=self.class_name)

        self.log = LogParser(base_config=base_config, title=__name__)

        self.base_config = base_config

        self.is_passive = is_passive
        if self.is_passive:
            self.base_config.clock_sim = self

        self.redis = RedisManager(name=self.class_name,
                                  base_config=base_config,
                                  log=self.log)

        self.interrupt_sig = interrupt_sig
        if self.interrupt_sig is None:
            self.interrupt_sig = multiprocessing.Event()

        if not self.is_passive:
            with ClockSim.lock:
                self.setup_active_instance()

        return

    # ------------------------------------------------------------------
    def setup_active_instance(self):
        """setup the active instance of the class
        """

        self.rnd_gen = Random(11)
        self.debug_datetime_now = False

        # sleep duration for thread loops
        self.loop_sleep_sec = 1
        self.pubsub_sleep_sec = 0.1

        # self.is_skip_daytime = False
        self.is_skip_daytime = True

        self.is_short_night = False
        # self.is_short_night = True

        # safety measure
        self.min_speed_factor = 1
        self.max_speed_factor = 10 * 60 * self.loop_sleep_sec

        # speedup simulation e.g.,:
        #   60*10 --> every 1 real sec goes to 10 simulated min
        self.speed_factor = 30
        # self.speed_factor = 10

        self.datetime_epoch = self.base_config.datetime_epoch

        self.init_sim_params_from_redis = True
        # self.init_sim_params_from_redis = False

        self.sim_params = {
            'speed_factor': self.speed_factor,
            'min_speed_factor': self.min_speed_factor,
            'max_speed_factor': self.max_speed_factor,
            'is_skip_daytime': self.is_skip_daytime,
            'is_short_night': self.is_short_night,
        }
        self.set_sim_speed(
            data_in={
                'speed_factor': self.speed_factor,
                'is_skip_daytime': self.is_skip_daytime,
                'is_short_night': self.is_short_night,
            },
            from_redis=self.init_sim_params_from_redis,
        )

        # make sure this is the only active instance
        self.init_active_instance()

        self.init_night_times()

        self.setup_threads()

        return

    # ------------------------------------------------------------------
    def setup_threads(self):
        """register threads to be run after this and all other services have
           been initialised
        """

        self.add_thread(target=self.loop_main)
        self.add_thread(target=self.pubsub_sim_params)

        return

    # ------------------------------------------------------------------
    def check_passive(self):
        """check if this is an active or passive instance

            if this is a passive instance, make sure that an active instance
            has been initialised by some other process. After n_sec_try of
            waiting, raise an exception

            Returns
            -------
            bool
                is this a passive instance
        """

        need_check = (self.can_loop() and self.is_passive
                      and not self.has_active_instance())
        # print('xxxxxxxx', self.can_loop() , self.is_passive , self.has_active_instance(),'---',need_check)
        if not need_check:
            return self.is_passive

        n_sec_sleep, n_sec_try = 0.01, 10
        n_loops = 1 + int(n_sec_try / n_sec_sleep)
        # check that the active instance has finished the initialisation stage
        for n_loop in range(n_loops + 1):
            sleep(n_sec_sleep)

            active_state = self.has_active_instance() or (not self.can_loop())
            if active_state:
                break
            if n_loop >= n_loops:
                raise Exception(
                    ' - ClockSim active instance can not initialise ?!?!')
            if n_loop > 0 and (n_loop % int(1 / n_sec_sleep) == 0):
                self.log.warn([
                    [
                        'r', ' - ClockSim blocking ( service_name = ',
                        self.class_name
                    ],
                    [
                        'r',
                        ' ) --> waiting for the active instance to init ...'
                    ],
                ])

        return self.is_passive

    # ------------------------------------------------------------------
    def get_time_now_sec(self):
        datetime_now = self.get_datetime_now()
        time_now_sec = int(datetime_to_secs(datetime_now))

        return time_now_sec

    # ------------------------------------------------------------------
    def get_is_night_now(self):
        if self.check_passive():
            return self.redis.get('clock_sim_is_night_now')

        return self.is_night_now

    # ------------------------------------------------------------------
    def get_n_nights(self):
        if self.check_passive():
            return self.redis.get('clock_sim_n_nights')

        return self.n_nights

    # ------------------------------------------------------------------
    def get_night_start_sec(self):
        if self.check_passive():
            return self.redis.get('clock_sim_night_start_sec')

        return self.night_start_sec

    # ------------------------------------------------------------------
    def get_night_end_sec(self):
        if self.check_passive():
            return self.redis.get('clock_sim_night_end_sec')

        return self.night_end_sec

    # ------------------------------------------------------------------
    def get_time_series_start_time_sec(self):
        if self.check_passive():
            start_time_sec = self.redis.get(
                'clock_sim_time_series_start_time_sec')
        else:
            start_time_sec = self.time_series_start_time_sec

        return int(start_time_sec)

    # ------------------------------------------------------------------
    def get_datetime_now(self):
        if self.check_passive():
            time_now_sec = self.redis.get('clock_sim_time_now_sec')
            return secs_to_datetime(time_now_sec)

        return self.datetime_now

    # ------------------------------------------------------------------
    def is_night_time_now(self):
        time_now_sec = self.get_time_now_sec()
        is_night = (time_now_sec > self.get_night_start_sec()
                    and time_now_sec <= self.get_night_end_sec())
        return is_night

    # ------------------------------------------------------------------
    def get_night_duration_sec(self):
        return (self.get_night_end_sec() - self.get_night_start_sec())

    # ------------------------------------------------------------------
    def get_astro_night_start_sec(self):
        # beginning of the astronomical night
        return int(self.get_night_start_sec())

    # ------------------------------------------------------------------
    def get_sim_params(self):
        if self.check_passive():
            sim_params = self.redis.get(name='clock_sim_sim_params')
        else:
            sim_params = self.sim_params

        return sim_params

    # ------------------------------------------------------------------
    def get_speed_factor(self):
        sim_params = self.get_sim_params()
        return sim_params['speed_factor']

    # ------------------------------------------------------------------
    def get_sec_since_midnight(self):
        days_since_epoch = (self.datetime_now - self.datetime_epoch).days
        sec_since_midnight = (
            (self.datetime_now - self.datetime_epoch).seconds +
            timedelta(days=days_since_epoch).total_seconds())
        return sec_since_midnight

    # ------------------------------------------------------------------
    def init_night_times(self):
        """reset the night
        """

        self.n_nights = 0
        self.datetime_now = None

        self.night_start_sec = datetime_to_secs(self.datetime_epoch)
        self.night_end_sec = datetime_to_secs(self.datetime_epoch)
        self.time_series_start_time_sec = self.night_start_sec

        self.set_night_times()

        self.update_once()

        return

    # ------------------------------------------------------------------
    def update_once(self):
        """single update, to be run as part of a loop
        """

        self.datetime_now += timedelta(seconds=self.loop_sleep_sec *
                                       self.speed_factor)

        if self.debug_datetime_now:
            self.log.info([
                ['g', ' --- Now (night:', self.n_nights, '/', ''],
                ['p', self.is_night_time_now()],
                ['g', ') '],
                ['y', self.datetime_now],
                [
                    'c',
                    ' (' + str(datetime_to_secs(self.datetime_now)) + ' sec)'
                ],
            ])

        self.update_n_night()

        time_now_sec = datetime_to_secs(self.datetime_now)
        is_night_now = self.is_night_time_now()

        self.redis.set(
            name='clock_sim_time_now_sec',
            data=time_now_sec,
        )
        self.redis.set(
            name='clock_sim_is_night_now',
            data=is_night_now,
        )
        self.redis.set(
            name='clock_sim_n_nights',
            data=self.n_nights,
        )
        self.redis.set(
            name='clock_sim_night_start_sec',
            data=self.night_start_sec,
        )
        self.redis.set(
            name='clock_sim_night_end_sec',
            data=self.night_end_sec,
        )
        self.redis.set(
            name='clock_sim_time_series_start_time_sec',
            data=self.time_series_start_time_sec,
        )
        return

    # ------------------------------------------------------------------
    def set_night_times(self):
        """reset the night
        """

        night_start_hours = self.rnd_gen.randint(18, 19)
        night_start_minutes = self.rnd_gen.randint(0, 59)
        night_end_hours = self.rnd_gen.randint(4, 5)
        night_end_minutes = self.rnd_gen.randint(0, 59)

        # short night for debugging
        if self.is_short_night:
            night_start_hours = 23
            night_start_minutes = 0
            night_end_hours = 2
            night_end_minutes = 0

        if self.datetime_now is None:
            self.datetime_now = self.datetime_epoch.replace(
                hour=(night_start_hours - 1), )

        self.time_series_start_time_sec = self.night_start_sec

        n_days = (self.datetime_now - self.datetime_epoch).days

        self.night_start_sec = timedelta(
            days=n_days,
            hours=night_start_hours,
            minutes=night_start_minutes,
            seconds=0,
        ).total_seconds()

        # e.g., night ends at 06:40
        self.night_end_sec = timedelta(
            days=(n_days + 1),
            hours=night_end_hours,
            minutes=night_end_minutes,
            seconds=0,
        ).total_seconds()

        if self.is_skip_daytime or self.is_short_night:
            self.datetime_now = (secs_to_datetime(self.night_start_sec) -
                                 timedelta(seconds=10))

        night_start = date_to_string(
            secs_to_datetime(self.night_start_sec),
            date_string=None,
        )
        night_end = date_to_string(
            secs_to_datetime(self.night_end_sec),
            date_string=None,
        )
        self.log.info([
            ['b', ' - setting new night: ['],
            ['g', night_start],
            ['b', ' --> '],
            ['g', night_end],
            ['b', ']'],
        ])

        return

    # ------------------------------------------------------------------
    def update_n_night(self):
        sec_since_midnight = self.get_sec_since_midnight()
        days_since_epoch = (self.datetime_now - self.datetime_epoch).days

        is_new_day = days_since_epoch > self.n_nights
        is_past_night_time = sec_since_midnight > self.night_end_sec

        if is_new_day and is_past_night_time:
            self.n_nights = days_since_epoch
            self.set_night_times()

        return

    # ------------------------------------------------------------------
    def need_data_update(self, update_opts):
        """check if a service needs to run an update, where
           updates only happen after min_wait of simulation time
        """

        time_now = self.get_time_now_sec()

        set_prev_update = (('prev_update' not in update_opts.keys())
                           or (update_opts['prev_update'] is None))
        if set_prev_update:
            update_opts['prev_update'] = time_now - 2 * update_opts['min_wait']

        time_diff = time_now - update_opts['prev_update']
        can_update = (time_diff > update_opts['min_wait'])

        # updates only happen during the astronomical night
        is_night_time = self.is_night_time_now()

        need_update = (is_night_time and can_update)
        if need_update:
            update_opts['prev_update'] = time_now

        return need_update

    # ------------------------------------------------------------------
    def set_sim_speed(self, data_in, from_redis=False):
        """set parameters which determine the lenght of the night, the
           real-time duration, given a speed factor, the delay between nights, etc.
        """

        speed_factor = data_in['speed_factor']
        is_skip_daytime = data_in['is_skip_daytime']
        is_short_night = data_in['is_short_night']

        if from_redis:
            red_data = self.redis.get(name='clock_sim_sim_params')

            if red_data is not None:
                speed_factor = red_data['speed_factor']
                is_skip_daytime = red_data['is_skip_daytime']
                is_short_night = red_data['is_short_night']

        if speed_factor is not None:
            speed_factor = float(speed_factor)

            is_ok = (speed_factor >= self.min_speed_factor
                     and speed_factor <= self.max_speed_factor)
            if not is_ok:
                raise ValueError(
                    'trying to set speed_factor out of bounds ...',
                    speed_factor)

            self.speed_factor = float(speed_factor)

        if is_skip_daytime is not None:
            self.is_skip_daytime = is_skip_daytime

        if is_short_night is not None:
            self.is_short_night = is_short_night

        self.log.info([
            ['b', ' - updating clock_sim_sim_params: '],
            ['c', '   speed_factor: '],
            ['p', self.speed_factor],
            ['c', ' , is_skip_daytime: '],
            ['p', self.is_skip_daytime],
            ['c', ' , is_short_night: '],
            ['p', self.is_short_night],
        ])

        self.sim_params = {
            'speed_factor': self.speed_factor,
            'min_speed_factor': self.min_speed_factor,
            'max_speed_factor': self.max_speed_factor,
            'is_skip_daytime': self.is_skip_daytime,
            'is_short_night': self.is_short_night,
        }
        self.redis.set(
            name='clock_sim_sim_params',
            data=self.sim_params,
        )

        self.redis.publish(channel='clock_sim_updated_sim_params')

        return

    # ------------------------------------------------------------------
    def loop_main(self):
        """main loop running in its own thread, updating the night
        """

        self.log.info([['g', ' - starting ClockSim.loop_main ...']])

        while self.can_loop():
            sleep(self.loop_sleep_sec)
            with ClockSim.lock:
                self.update_once()

        self.log.info([['c', ' - ending ClockSim.loop_main ...']])

        return

    # ------------------------------------------------------------------
    def pubsub_sim_params(self):
        """loop running in its own thread, reacting to pubsub events
        """

        self.log.info([['g', ' - starting ClockSim.pubsub_sim_params ...']])

        # setup the channel once
        pubsub_tag = 'clock_sim_set_sim_params'
        pubsub = self.redis.pubsub_subscribe(pubsub_tag)

        # listen to changes on the channel and do stuff
        while self.can_loop():
            sleep(self.pubsub_sleep_sec)

            msg = self.redis.pubsub_get_message(pubsub=pubsub)
            if msg is None:
                continue

            with ClockSim.lock:
                keys = ['speed_factor', 'is_skip_daytime', 'is_short_night']
                data_out = dict()
                for key in keys:
                    data_out[key] = msg['data'].get(key)

                self.set_sim_speed(data_in=data_out)

        self.log.info([['c', ' - ending ClockSim.pubsub_sim_params ...']])

        return
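A usage sketch for the passive side of ClockSim: a passive instance reads the clock state back from redis (an active instance must already be running in another process), and simulation parameters can be changed by publishing on the 'clock_sim_set_sim_params' channel handled by pubsub_sim_params(). The base_config below and the exact signature of RedisManager.publish are assumptions, not taken from the source:

# hypothetical consumer of the clock, assuming base_config is already initialised
clock = ClockSim(base_config=base_config, is_passive=True)

datetime_now = clock.get_datetime_now()   # read back from redis by the passive instance
is_night_now = clock.get_is_night_now()

# the active instance listens on this channel and applies any of the three keys
clock.redis.publish(
    channel='clock_sim_set_sim_params',
    data={'speed_factor': 60, 'is_skip_daytime': True, 'is_short_night': None},
)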
Example #15
    def __init__(self, ws_send, *args, **kwargs):
        self.ws_send = ws_send

        self.sess_id = None
        self.user_id = None
        self.sess_name = None
        self.user_group = None
        self.user_group_id = None

        self.has_init_sess = False
        self.is_sess_offline = True
        self.is_sess_open = False
        self.log_send_packet = False
        self.sess_ping_time = None

        # is it allowed to restore sessions as part of development
        # or do we always reload web pages on server reloads
        self.can_restore_existing_sess = True
        # self.can_restore_existing_sess = False

        # debug the setup / restoration of sync groups
        self.debug_sync_group = False
        # self.debug_sync_group = True
        self.debug_sync_group = (self.debug_sync_group
                                 and self.base_config.debug_opts['dev'])

        # validate all session widgets every few seconds
        self.validate_widget_time_sec = 0
        self.min_validate_widget_time_sec = 10

        self.valid_loop_sleep_sec = 0.01
        self.basic_widget_sleep_sec = 1
        self.sess_expire_sec = 15
        self.serv_expire_sec = 30
        self.user_expire_sec = 43200
        self.widget_expire_sec = self.user_expire_sec
        self.cleanup_sleep_sec = 60

        self.n_id_digits = 4
        self.n_serv_msg = 0

        # session ping/pong heartbeat
        self.sess_ping = {
            # interval for sending ping/pong events
            'send_interval_msec': 2500,
            # maximum delay before the session is considered slow
            'max_interval_good_msec': 500,
            # maximum delay before the session is considered disconnected
            'max_interval_slow_msec': 1000,
            # delay beyond which the client socket is forcibly closed
            # and put into a reconnection attempt loop
            'max_interval_bad_msec': 5000,
        }
        # self.sess_ping = {
        #     # interval for sending ping/pong events
        #     'send_interval_msec': 2000,
        #     # how much delay is considered ok for a slow session
        #     'max_interval_good_msec': 2000,
        #     # how much delay is considered ok for a disconnected session
        #     'max_interval_slow_msec': 6000,
        # }

        self.widget_module_dir = 'frontend_manager.py.widgets'
        self.util_module_dir = 'frontend_manager.py.utils'

        self.loop_prefix = 'ws;loop;'
        self.heartbeat_prefix = 'ws;heartbeat;'

        self.sync_group_id_prefix = 'grp_'
        self.sync_group_title_prefix = 'Group '

        self.icon_prefix = 'icn_'

        self.asyncio_queue = asyncio.Queue()

        self.log = LogParser(base_config=self.base_config, title=__name__)

        self.allowed_widget_types = self.base_config.allowed_widget_types
        self.all_widget_types = [
            a for a in (self.base_config.allowed_widget_types['synced'] +
                        self.base_config.allowed_widget_types['not_synced'])
        ]

        self.redis_port = self.base_config.redis_port
        self.site_type = self.base_config.site_type
        self.allow_panel_sync = self.base_config.allow_panel_sync
        self.is_simulation = self.base_config.is_simulation

        self.redis = RedisManager(name=self.__class__.__name__,
                                  base_config=self.base_config,
                                  log=self.log)

        rnd_seed = get_rnd_seed()
        self.rnd_gen = Random(rnd_seed)

        self.inst_data = self.base_config.inst_data

        if WebsocketBase.serv_id is None:
            self.set_server_id()

        # setup the locker for this server
        self.locker = self.setup_locker()

        # update the lock_namespace (after the session id has been set, maybe
        # later other session parameters would be needed?)
        self.update_lock_namespace()

        return
Example #16
class ViewManager():
    """views for given web addresses
    """

    # ------------------------------------------------------------------
    def __init__(self, base_config, *args, **kwargs):
        self.log = LogParser(base_config=base_config, title=__name__)
        self.log.info([['y', " - ViewManager - "],
                       ['g', base_config.site_type]])

        self.base_config = base_config
        self.app_prefix = base_config.app_prefix
        self.site_type = base_config.site_type
        self.websocket_route = base_config.websocket_route

        return

    # ------------------------------------------------------------------
    def get_display_user_id(self, request):
        user_id = request.authenticated_userid
        return ('' if user_id is None else user_id)

    # ------------------------------------------------------------------
    def get_display_user_group(self, request):
        user_group = ''
        for princ in request.effective_principals:
            if princ.startswith('group:'):
                user_group = princ[len('group:'):]
        return str(user_group)

    # ------------------------------------------------------------------
    # login page with authentication - check the DB for
    # the given user_id/password
    # ------------------------------------------------------------------
    def view_login(self, request):
        view_name = "login"

        # if already logged in, go to the index
        if request.authenticated_userid is not None:
            return HTTPFound(location=request.route_url("index"))

        # perform the authentication check against the DB
        if 'user_name' in request.params and 'password' in request.params:
            user_name = request.params['user_name']
            password = request.params['password']
            hashed_pw = USERS.get(user_name)

            if hashed_pw and check_password(password, hashed_pw):
                headers = remember(request, user_name)
                return HTTPFound(location=request.route_url("index"),
                                 headers=headers)

        return dict(
            location=request.route_url(view_name),
            login=request.authenticated_userid,
            app_prefix=self.app_prefix,
            ns_type=self.site_type,
            websocket_route=self.websocket_route['client'],
            widget_type=view_name,
            display_user_id=self.get_display_user_id(request),
            display_user_group=self.get_display_user_group(request),
        )

    # ------------------------------------------------------------------
    def view_logout(self, request):
        """logout page with a redirect to the login
        """

        # forget the currently logged-in user
        headers = forget(request)
        # redirect to the login page
        return HTTPFound(location=request.route_url("index"), headers=headers)

    # ------------------------------------------------------------------
    @forbidden_view_config()
    def view_forbidden(self, request):
        """forbidden view redirects to the login
        """

        return HTTPFound(location=request.route_url("login"))

    # ------------------------------------------------------------------
    def view_index(self, request):
        """index, empty, not-found
        """

        view_name = "index"

        return dict(
            ns_type=self.site_type,
            websocket_route=self.websocket_route['client'],
            widget_type=view_name,
            app_prefix=self.app_prefix,
            login=request.authenticated_userid,
            came_from=request.route_url(view_name),
            display_user_id=self.get_display_user_id(request),
            display_user_group=self.get_display_user_group(request),
        )

    # ------------------------------------------------------------------
    def view_empty(self, request):
        """redirects
        """

        return HTTPFound(location=request.route_url("index"))

    def view_not_found(self, request):
        """redirects
        """

        view_name = "not_found"

        return dict(
            ns_type=self.site_type,
            websocket_route=self.websocket_route['client'],
            widget_type=view_name,
            app_prefix=self.app_prefix,
            login=request.authenticated_userid,
            location=request.route_url(view_name),
            display_user_id=self.get_display_user_id(request),
            display_user_group=self.get_display_user_group(request),
        )

    # ------------------------------------------------------------------
    def view_common(self, request):
        """the widgets
        """

        view_name = request.matched_route.name

        return dict(
            ns_type=self.site_type,
            websocket_route=self.websocket_route['client'],
            widget_type=view_name,
            app_prefix=self.app_prefix,
            login=request.authenticated_userid,
            came_from=request.route_url(view_name),
            display_user_id=self.get_display_user_id(request),
            display_user_group=self.get_display_user_group(request),
        )
Example #17
    async def websocket_manager_interface(scope, receive, send):
        try:
            log = LogParser(base_config=WebsocketManager.base_config,
                            title=__name__)
        except Exception as e:
            raise e

        try:
            manager = WebsocketManager(ws_send=send)
        except Exception as e:
            log.error([['r', e]])
            raise e

        try:
            while True:
                try:
                    message = await receive()
                except Exception as e:
                    traceback.print_tb(e.__traceback__)
                    raise e

                if message["type"] == "websocket.connect":
                    try:
                        # blocking connect event
                        await send({"type": "websocket.accept"})
                        await manager.send_initial_connect()
                    except Exception as e:
                        log.error([['r', e]])
                        traceback.print_tb(e.__traceback__)

                elif message["type"] == "websocket.receive":
                    text = message.get("text")
                    if text:
                        text = json.loads(text)

                        try:
                            # non-blocking receive events
                            asyncio.ensure_future(manager.receive(data=text))
                        except Exception as e:
                            traceback.print_tb(e.__traceback__)
                            log.error([['r', e]])

                elif message["type"] == "websocket.disconnect":
                    try:
                        # blocking disconnect event
                        await manager.sess_disconnected()
                    except Exception as e:
                        traceback.print_tb(e.__traceback__)
                        log.error([['r', e]])

                    break
                else:
                    raise Exception('unknown message type: ' + str(message))

        except Exception as e:
            log.error([['r', e]])
            raise e
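
A minimal sketch (not part of the original snippet) of how an ASGI-style websocket callable such as websocket_manager_interface() above could be exercised with stub receive/send coroutines; drive_websocket_handler() and its scripted message list are hypothetical, while the 'websocket.connect' / 'websocket.receive' / 'websocket.disconnect' message types are the ones handled in the loop above.

import asyncio
import json


async def drive_websocket_handler(handler):
    # the scripted messages an ASGI server would normally deliver
    messages = [
        {'type': 'websocket.connect'},
        {'type': 'websocket.receive', 'text': json.dumps({'event': 'ping'})},
        {'type': 'websocket.disconnect'},
    ]
    sent = []

    # stub ASGI receive: pop the next scripted message
    async def receive():
        return messages.pop(0)

    # stub ASGI send: record whatever the handler sends back
    async def send(message):
        sent.append(message)

    await handler(scope={}, receive=receive, send=send)
    return sent

# usage (hypothetical):
#     asyncio.run(drive_websocket_handler(websocket_manager_interface))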
Example #18
0
class SchedulerACS(ThreadManager):
    has_active = False
    lock = Lock()

    def __init__(self, base_config, interrupt_sig):
        self.log = LogParser(base_config=base_config, title=__name__)
        # self.log.info([['y', ' - SchedulerACS - '], ['g', base_config.site_type]])

        if SchedulerACS.has_active:
            raise Exception('Cannot instantiate SchedulerACS more than once...')
        else:
            SchedulerACS.has_active = True

        self.base_config = base_config
        self.site_type = self.base_config.site_type
        self.clock_sim = self.base_config.clock_sim
        self.inst_data = self.base_config.inst_data

        self.interrupt_sig = interrupt_sig

        self.tel_ids = self.inst_data.get_inst_ids(inst_types=['LST', 'MST', 'SST'])

        self.no_sub_arr_name = self.base_config.no_sub_arr_name

        self.class_name = self.__class__.__name__
        self.redis = RedisManager(
            name=self.class_name, base_config=self.base_config, log=self.log
        )

        self.debug = False
        self.expire_sec = 86400  # one day
        # self.expire_sec = 5

        self.MockSched = None

        # self.client = PySimpleClient()
        # self.supervisor = self.client.getComponent('ArraySupervisor')
        # self.log.info([['y',' - SchedulerACS - '],['p','got supervisor!']])

        self.phases_exe = dict()
        self.phases_exe['start'] = [
            'run_config_mount',
            'run_config_camera',
            'run_config_daq',
            'run_config_mirror',
        ]
        self.phases_exe['during'] = [
            'run_take_data',
        ]
        self.phases_exe['finish'] = [
            'run_finish_mount',
            'run_finish_camera',
            'run_finish_daq',
        ]

        self.az_min_max = [-180, 180]

        self.loop_sleep_sec = 3

        rnd_seed = get_rnd_seed()
        # rnd_seed = 10987268332
        self.rnd_gen = Random(rnd_seed)

        self.setup_threads()

        self.MockSched = MockSched(
            base_config=self.base_config, interrupt_sig=self.interrupt_sig
        )

        # ------------------------------------------------------------------
        # temporary hack to be consistent with SchedulerStandalone
        # ------------------------------------------------------------------
        self.external_events = []
        self.external_clock_events = []
        external_generate_clock_events(self)
        external_generate_events(self)

        return

    # ---------------------------------------------------------------------------
    def setup_threads(self):
        self.add_thread(target=self.loop_main)
        return

    # ------------------------------------------------------------------
    def reset_blocks(self):
        debug_tmp = False
        # debug_tmp = True

        if debug_tmp:
            self.log.info([['p', ' - SchedulerACS.reset_blocks() ...']])

        if self.MockSched is None:
            sleep(0.1)
            self.log.debug([[
                'r', ' - no MockSched ... will try to reset_blocks() again ...'
            ]])
            self.reset_blocks()
            return

        night_duration_sec = self.clock_sim.get_night_duration_sec()

        sched_blocks = self.MockSched.get_blocks()

        obs_block_ids = dict()
        obs_block_ids['wait'] = []
        obs_block_ids['run'] = []
        obs_block_ids['done'] = []
        obs_block_ids['cancel'] = []
        obs_block_ids['fail'] = []

        blocks_run = []
        active = sched_blocks['active']

        pipe = self.redis.get_pipe()

        for sched_blk_id, schBlock in sched_blocks['blocks'].items():

            sub_array_tels = (
                schBlock['sched_block'].config.instrument.sub_array.telescopes
            )
            tel_ids = [x.id for x in sub_array_tels]

            obs_blocks = schBlock['sched_block'].observation_blocks

            # get the total duration of all obs blocks
            block_duration_sec = 0
            for n_obs_block_now in range(len(obs_blocks)):
                obs_block_id = obs_blocks[n_obs_block_now].id
                block_duration_sec += (sched_blocks['metadata'][obs_block_id]['duration'])

            targets = get_rnd_targets(
                self=self,
                night_duration_sec=night_duration_sec,
                block_duration_sec=block_duration_sec,
            )

            for n_obs_block_now in range(len(obs_blocks)):
                obs_block_now = obs_blocks[n_obs_block_now]
                obs_block_id = obs_block_now.id
                # trg_id = obs_block_now.src.id
                # coords = obs_block_now.src.coords.horizontal

                sched_metadata = sched_blocks['metadata'][obs_block_id]
                timestamp = sched_metadata['timestamp']
                metadata = sched_metadata['metadata']
                status = sched_metadata['status']
                phases = sched_metadata['phases']
                duration = sched_metadata['duration']
                start_time_sec_plan = sched_metadata['start_time_sec_plan']
                start_time_sec_exe = sched_metadata['start_time_sec_exe']

                start_time_sec = (
                    start_time_sec_plan
                    if start_time_sec_exe is None else start_time_sec_exe
                )

                # state of the obs block
                if status == sb.OB_PENDING:
                    state = 'wait'
                elif status == sb.OB_RUNNING:
                    state = 'run'
                elif status == sb.OB_CANCELED:
                    state = 'cancel'
                elif status == sb.OB_FAILED:
                    state = 'fail'
                else:
                    state = 'done'

                # final sanity check
                if state == 'run' and sched_blk_id not in active:
                    state = 'done'

                run_phase = []
                if state == 'run':
                    for p in phases:
                        if p.status == sb.OB_RUNNING:
                            phase_name = 'run_' + p.name
                            for phases_exe in self.phases_exe:
                                if phase_name in self.phases_exe[phases_exe]:
                                    run_phase.append(phase_name)

                    if debug_tmp:
                        self.log.debug([
                            ['b', ' -- run_phase - '],
                            ['y', run_phase, ' '],
                            ['b', tel_ids],
                        ])

                can_run = True
                if state == 'cancel' or state == 'fail':
                    can_run = (self.clock_sim.get_time_now_sec() >= start_time_sec)

                exe_state = {'state': state, 'can_run': can_run}

                time = {
                    'start': start_time_sec,
                    'duration': duration,
                }
                time['end'] = time['start'] + time['duration']

                telescopes = {
                    'large': {
                        'min': int(len(list(filter(lambda x: 'L' in x, tel_ids))) / 2),
                        'max': 4,
                        'ids': list(filter(lambda x: 'L' in x, tel_ids))
                    },
                    'medium': {
                        'min': int(len(list(filter(lambda x: 'M' in x, tel_ids))) / 2),
                        'max': 25,
                        'ids': list(filter(lambda x: 'M' in x, tel_ids))
                    },
                    'small': {
                        'min': int(len(list(filter(lambda x: 'S' in x, tel_ids))) / 2),
                        'max': 70,
                        'ids': list(filter(lambda x: 'S' in x, tel_ids))
                    }
                }
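                # illustrative example (added for clarity, ids follow the Lx/Mx/Sx
                # naming used elsewhere in this code): with
                # tel_ids = ['Lx01', 'Mx05', 'Mx07', 'Sx11'] the 'large' entry above
                # keeps ids ['Lx01'] with min = 0, 'medium' keeps ['Mx05', 'Mx07']
                # with min = 1, and 'small' keeps ['Sx11'] with min = 0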

                pointings = get_rnd_pointings(
                    self=self,
                    tel_ids=tel_ids,
                    targets=targets,
                    sched_block_id=sched_blk_id,
                    obs_block_id=obs_block_id,
                    n_obs_now=n_obs_block_now,
                )

                block = dict()
                block['sched_block_id'] = sched_blk_id
                block['obs_block_id'] = obs_block_id
                block['time'] = time
                block['metadata'] = metadata
                block['timestamp'] = timestamp
                block['telescopes'] = telescopes
                block['exe_state'] = exe_state
                block['run_phase'] = run_phase
                block['targets'] = targets
                block['pointings'] = pointings
                block['tel_ids'] = tel_ids

                if state == 'run':
                    blocks_run += [block]

                obs_block_ids[state].append(obs_block_id)

                pipe.set(
                    name=obs_block_id,
                    data=block,
                    expire_sec=self.expire_sec,
                    packed=True
                )

        pipe.set(name='obs_block_ids_' + 'wait', data=obs_block_ids['wait'], packed=True)
        pipe.set(name='obs_block_ids_' + 'run', data=obs_block_ids['run'], packed=True)
        pipe.set(name='obs_block_ids_' + 'done', data=obs_block_ids['done'], packed=True)
        pipe.set(
            name='obs_block_ids_' + 'cancel', data=obs_block_ids['cancel'], packed=True
        )
        pipe.set(name='obs_block_ids_' + 'fail', data=obs_block_ids['fail'], packed=True)

        pipe.execute()

        update_sub_arrs(self=self, blocks=blocks_run)

        return

    # # ------------------------------------------------------------------
    # #
    # # ------------------------------------------------------------------
    # def update_sub_arrs(self, blocks=None):
    #     # inst_pos = self.redis.h_get_all(name='inst_pos')

    #     pipe = self.redis.get_pipe()

    #     if blocks is None:
    #         obs_block_ids = self.redis.get(
    #             name=('obs_block_ids_' + 'run'), packed=True, default_val=[]
    #         )
    #         for obs_block_id in obs_block_ids:
    #             pipe.get(obs_block_id)

    #         blocks = pipe.execute(packed=True)

    #     # sort so last is first in the list (latest sub-array defined gets the telescope)
    #     blocks = sorted(
    #         blocks, cmp=lambda a, b: int(b['timestamp']) - int(a['timestamp'])
    #     )
    #     # print [a['timestamp'] for a in blocks]

    #     sub_arrs = []
    #     all_tel_ids_in = []
    #     for n_block in range(len(blocks)):
    #         block_tel_ids = blocks[n_block]['tel_ids']
    #         pnt_id = blocks[n_block]['point_id']
    #         pointing_name = blocks[n_block]['pointing_name']

    #         # compile the telescope list for this block
    #         tels = []
    #         for id_now in block_tel_ids:
    #             if id_now not in all_tel_ids_in:
    #                 all_tel_ids_in.append(id_now)
    #                 tels.append({'id': id_now})

    #         # add the telescope list for this block
    #         sub_arrs.append({'id': pnt_id, 'N': pointing_name, 'children': tels})

    #     # ------------------------------------------------------------------
    #     # now take care of all free telescopes
    #     # ------------------------------------------------------------------
    #     tels = []
    #     all_tel_ids = [x for x in self.tel_ids if x not in all_tel_ids_in]
    #     for id_now in all_tel_ids:
    #         tels.append({'id': id_now})

    #     sub_arrs.append({'id': self.no_sub_arr_name, 'children': tels})

    #     # ------------------------------------------------------------------
    #     # for now - a simple/stupid solution, where we write the sub-arrays and publish each
    #     # time, even if the content is actually the same ...
    #     # ------------------------------------------------------------------
    #     self.redis.set(name='sub_arrs', data=sub_arrs, packed=True)
    #     self.redis.publish(channel='sub_arrs')

    #     return

    # ------------------------------------------------------------------
    def loop_main(self):
        self.log.info([['g', ' - starting SchedulerACS.loop_main ...']])

        pipe = self.redis.get_pipe()

        pipe.set(name='obs_block_ids_' + 'wait', data='')
        pipe.set(name='obs_block_ids_' + 'run', data='')
        pipe.set(name='obs_block_ids_' + 'done', data='')
        pipe.set(name='obs_block_ids_' + 'cancel', data='')
        pipe.set(name='obs_block_ids_' + 'fail', data='')

        pipe.execute()

        update_sub_arrs(self=self, blocks=[])

        while self.can_loop():
            sleep(self.loop_sleep_sec)

            with SchedulerACS.lock:
                self.reset_blocks()

        self.log.info([['c', ' - ending SchedulerACS.loop_main ...']])

        return
Example #19
0
class time_of_night(ServiceManager):

    # ------------------------------------------------------------------
    def __init__(self,
                 base_config,
                 service_name,
                 interrupt_sig,
                 end_time_sec=None,
                 timescale=None,
                 *args,
                 **kwargs):
        self.class_name = self.__class__.__name__
        service_name = (service_name
                        if service_name is not None else self.class_name)
        super().__init__(service_name=service_name)

        self.log = LogParser(base_config=base_config, title=__name__)

        self.base_config = base_config

        self.service_name = service_name
        self.interrupt_sig = interrupt_sig

        # 28800 -> 8 hour night
        self.end_time_sec = 28800 if end_time_sec is None else end_time_sec
        # 0.035 -> have 30 minutes last for one minute in real time
        self.timescale = 0.07 if timescale is None else timescale
        # 0.0035 -> have 30 minutes last for 6 sec in real time
        # if not has_acs:
        #   self.timescale /= 2
        # self.timescale /= 20
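        # note on the arithmetic (added for clarity): loop_main() advances the
        # simulated clock by loop_sleep_sec / timescale each iteration, so one
        # simulated second lasts timescale real seconds; e.g. with
        # timescale = 0.035 a 1800 s (30 minute) simulated span takes
        # 1800 * 0.035 = 63 s of real time, and with the default 0.07 the full
        # 28800 s night takes about 28800 * 0.07 ~= 2016 s (~34 minutes)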

        self.redis = RedisManager(name=self.class_name,
                                  base_config=base_config,
                                  log=self.log)

        self.n_night = -1

        # sleep duration for thread loops
        self.loop_sleep_sec = 1

        # range in seconds of time-series data to be stored for eg monitoring points
        self.epoch = datetime.utcfromtimestamp(0)
        self.time_series_n_seconds = 60 * 30
        self.second_scale = 1000
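        # note (added for clarity): with second_scale = 1000,
        # get_real_time_sec() returns milliseconds since the epoch, and
        # get_time_series_start_time_sec() is the timestamp 30 minutes earlier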

        self.reset_night()

        # make sure this is the only active instance
        self.init_active_instance()

        self.setup_threads()

        return

    # ------------------------------------------------------------------
    def setup_threads(self):

        self.add_thread(target=self.loop_main)

        return

    # ------------------------------------------------------------------
    def get_total_time_seconds(self):
        return self.end_time_sec

    # ------------------------------------------------------------------
    def get_n_night(self):
        return self.n_night

    # ------------------------------------------------------------------
    def get_timescale(self):
        return self.timescale

    # ------------------------------------------------------------------
    def get_current_time(self, n_digits=3):
        if n_digits is not None and n_digits >= 0:
            return (int(floor(self.time_now_sec)) if n_digits == 0
                    else round(self.time_now_sec, n_digits))
        else:
            return self.time_now_sec

    # ------------------------------------------------------------------
    def get_second_scale(self):
        return self.second_scale

    # ------------------------------------------------------------------
    def get_reset_time(self):
        return self.real_reset_time_sec

    # ------------------------------------------------------------------
    def get_real_time_sec(self):
        """the global function for the current system time
        """

        return int((datetime.utcnow() - self.epoch).total_seconds() *
                   self.second_scale)

    # ------------------------------------------------------------------
    def get_time_series_start_time_sec(self):
        return (self.get_real_time_sec()
                - self.time_series_n_seconds * self.second_scale)

    # ------------------------------------------------------------------
    def get_start_time_sec(self):
        return 0

    # ------------------------------------------------------------------
    def reset_night(self, log=None):
        self.n_night += 1
        self.real_reset_time_sec = self.get_real_time_sec()

        time_now_sec = int(floor(self.get_start_time_sec()))
        self.time_now_sec = time_now_sec

        if log is not None:
            self.log.info([
                ['r', '- reset_night(): '],
                ['y', 'time_now_sec:', self.time_now_sec, ', '],
                ['b', 'n_night:', self.n_night, ', '],
                ['g', 'real_reset_time_sec:', self.real_reset_time_sec],
            ])

        pipe = self.redis.get_pipe()

        pipe.set(name='time_of_night_' + 'scale', data=self.timescale)
        pipe.set(name='time_of_night_' + 'start', data=time_now_sec)
        pipe.set(name='time_of_night_' + 'end', data=self.end_time_sec)
        pipe.set(name='time_of_night_' + 'now', data=time_now_sec)

        pipe.execute()

        return

    # ------------------------------------------------------------------
    def loop_main(self):
        self.log.info([['g', ' - starting time_of_night.loop_main ...']])

        while self.can_loop():
            self.time_now_sec += self.loop_sleep_sec / self.timescale
            if self.time_now_sec > self.end_time_sec:
                self.reset_night()

            self.redis.set(name='time_of_night_' + 'now',
                           data=int(floor(self.time_now_sec)))

            sleep(self.loop_sleep_sec)

        self.log.info([['c', ' - ending time_of_night.loop_main ...']])

        return
Example #20
0
        redis_port=redis_port,
        app_port=app_port,
        app_prefix=app_prefix,
        app_host=app_host,
        log_level=log_level,
        websocket_route=websocket_route,
        allow_panel_sync=allow_panel_sync,
        debug_opts=debug_opts,
        is_simulation=is_simulation,
        widget_info=widget_info,
        allowed_widget_types=allowed_widget_types,
    )

    log = LogParser(
        base_config=base_config,
        title=__name__,
        log_level=log_level,
        log_file=log_file,
    )
    log.info([['wg', ' - Starting pyramid app -', app_name, '...']])
    log.info([['c', ' - has_acs = '], [('g' if has_acs else 'r'), has_acs]])

    settings_log = [['g', ' - server settings:\n']]
    for k, v in settings.items():
        settings_log += [['b', str(k)], [': ']]
        settings_log += [['c', str(v)], [',  ']]
    log.info(settings_log)

    # # do_flush_redis = True
    # if do_flush_redis:
    #     from shared.RedisManager import RedisManager
    #     log.warn([['wr', ' ---- flushing redis ... ----']])
Example #21
0
class MockTarget():
    """target simulation class, simulating the execution of observations
    """

    # ------------------------------------------------------------------
    def __init__(self, base_config):
        self.log = LogParser(base_config=base_config, title=__name__)
        self.log.info([['g', ' - starting MockTarget ...']])

        self.base_config = base_config
        self.site_type = self.base_config.site_type

        self.class_name = self.__class__.__name__
        self.redis = RedisManager(
            name=self.class_name, base_config=self.base_config, log=self.log
        )

        # ------------------------------------------------------------------
        rnd_seed = 10989152934
        # self.rnd_gen = Random(rnd_seed)
        self.rnd_gen = Random()

        self.az_min_max = [-180, 180]
        self.zen_min_max_tel = [0, 70]

        self.init()

        return

    # ------------------------------------------------------------------
    def init(self):
        self.create_target()
        return

    # ------------------------------------------------------------------
    def create_target(self):
        n_rnd_targets = max(5, int(self.rnd_gen.random() * 12))
        start_time_sec = 0
        end_time_sec = 28800
        step = end_time_sec / (n_rnd_targets + 1)
        offset = step * 0.66
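        # worked example (added for clarity): with n_rnd_targets = 6 the step is
        # 28800 / 7 ~= 4114 s, so successive targets get observability windows
        # spaced roughly 4114 s apart across the night, each jittered by up to
        # offset ~= 2715 s via the random terms below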

        self.target_ids = []
        self.targets = []

        pipe = self.redis.get_pipe()

        for index in range(n_rnd_targets):
            # if self.redis.exists('inst_pos'):
            #     inst_pos_in = self.redis.h_get_all(name="inst_pos",
            target = {
                "id": "target_" + str(index),
                "name": "target_" + str(index),
            }

            target["pos"] = []
            target["pos"] += [
                self.rnd_gen.random() *
                (self.az_min_max[1] - self.az_min_max[0]) + self.az_min_max[0]
            ]
            target["pos"] += [
                self.rnd_gen.random() *
                (self.zen_min_max_tel[1] - self.zen_min_max_tel[0])
                + self.zen_min_max_tel[0]
            ]

            minimal = start_time_sec + (step * index) - (self.rnd_gen.random() * offset)

            optimal = 1500 + start_time_sec + (step * index) + (step * 0.5) + (
                (self.rnd_gen.random() - 0.5) * offset
            )

            maximal = 4000 + start_time_sec + (step * (index + 1)) + (
                self.rnd_gen.random() * offset
            )

            target["observability"] = {
                "minimal": minimal,
                "optimal": optimal,
                "maximal": maximal
            }

            self.target_ids.append("target_" + str(index))
            self.targets.append(target)

            pipe.set(name='target_' + str(index), data=target)

        pipe.execute()

        pipe.set(name='target_ids', data=self.target_ids)
        pipe.execute()

        return
Example #22
0
    def __init__(self, base_config, service_name, interrupt_sig):
        self.class_name = self.__class__.__name__
        service_name = (service_name if service_name is not None else self.class_name)
        super().__init__(service_name=service_name)

        self.log = LogParser(base_config=base_config, title=__name__)
        self.log.info([['g', ' - starting SchedulerStandalone ...']])

        self.base_config = base_config
        self.site_type = self.base_config.site_type
        self.clock_sim = self.base_config.clock_sim
        self.inst_data = self.base_config.inst_data

        self.service_name = service_name
        self.interrupt_sig = interrupt_sig

        self.tel_ids = self.inst_data.get_inst_ids(inst_types=['LST', 'MST', 'SST'])
        self.sub_array_insts = self.inst_data.get_sub_array_insts()

        self.no_sub_arr_name = self.base_config.no_sub_arr_name

        self.redis = RedisManager(
            name=self.class_name, base_config=self.base_config, log=self.log
        )

        self.debug = False
        self.expire_sec = 86400 * 2  # two days
        # self.expire_sec = 5

        # self.max_n_obs_block = 4 if self.site_type == 'N' else 7
        # self.max_n_obs_block = min(self.max_n_obs_block, floor(len(self.tel_ids) / 4))

        # sleep duration for thread loops
        self.loop_sleep_sec = 1
        # minimal real-time delay between randomisations (once every self.loop_act_rate sec)
        self.loop_act_rate = max(int(2 / self.loop_sleep_sec), 1)

        self.max_n_cycles = 100
        self.min_n_sched_block = 2  # 2
        self.max_n_sched_block = 5  # 5
        self.min_n_obs_block = 1
        self.max_n_obs_block = 5
        self.min_n_tel_block = 4
        self.max_n_free_tels = 5

        self.name_prefix = get_rnd(n_digits=5, out_type=str)

        self.az_min_max = [-180, 180]
        self.zen_min_max_tel = [0, 70]
        self.zen_min_max_pnt = [0, 20]

        self.phases_exe = {
            'start': [
                'run_config_mount', 'run_config_camera', 'run_config_DAQ',
                'run_config_mirror'
            ],
            'during': ['run_take_data'],
            'finish': ['run_finish_mount', 'run_finish_camera', 'run_finish_cleanup'],
        }

        self.error_rnd_frac = {
            'E1': 0.3,
            'E2': 0.4,
            'E3': 0.5,
            'E4': 0.6,
            'E5': 0.7,
            'E6': 0.8,
            'E7': 0.9,
            'E8': 1,
        }

        self.phase_rnd_frac = {
            'start': 0.29,
            'finish': 0.1,
            'cancel': 0.06,
            'fail': 0.1,
        }

        # 1800 = 30 minutes
        self.obs_block_sec = 1800

        self.n_init_cycle = -1
        self.n_nights = -1

        self.update_name = 'obs_block_update'
        self.sched_block_prefix = 'sched_block_'
        self.obs_block_prefix = 'obs_block_'

        rnd_seed = get_rnd_seed()
        self.rnd_gen = Random(rnd_seed)

        self.external_clock_events = []
        external_generate_clock_events(self)

        self.redis.delete(self.update_name)

        self.init()

        # make sure this is the only active instance
        self.init_active_instance()

        self.setup_threads()

        return
Example #23
0
    def __init__(self, base_config, interrupt_sig):
        self.log = LogParser(base_config=base_config, title=__name__)
        # self.log.info([['y', ' - SchedulerACS - '], ['g', base_config.site_type]])

        if SchedulerACS.has_active:
            raise Exception('Cannot instantiate SchedulerACS more than once...')
        else:
            SchedulerACS.has_active = True

        self.base_config = base_config
        self.site_type = self.base_config.site_type
        self.clock_sim = self.base_config.clock_sim
        self.inst_data = self.base_config.inst_data

        self.interrupt_sig = interrupt_sig

        self.tel_ids = self.inst_data.get_inst_ids(inst_types=['LST', 'MST', 'SST'])

        self.no_sub_arr_name = self.base_config.no_sub_arr_name

        self.class_name = self.__class__.__name__
        self.redis = RedisManager(
            name=self.class_name, base_config=self.base_config, log=self.log
        )

        self.debug = False
        self.expire_sec = 86400  # one day
        # self.expire_sec = 5

        self.MockSched = None

        # self.client = PySimpleClient()
        # self.supervisor = self.client.getComponent('ArraySupervisor')
        # self.log.info([['y',' - SchedulerACS - '],['p','got supervisor!']])

        self.phases_exe = dict()
        self.phases_exe['start'] = [
            'run_config_mount',
            'run_config_camera',
            'run_config_daq',
            'run_config_mirror',
        ]
        self.phases_exe['during'] = [
            'run_take_data',
        ]
        self.phases_exe['finish'] = [
            'run_finish_mount',
            'run_finish_camera',
            'run_finish_daq',
        ]

        self.az_min_max = [-180, 180]

        self.loop_sleep_sec = 3

        rnd_seed = get_rnd_seed()
        # rnd_seed = 10987268332
        self.rnd_gen = Random(rnd_seed)

        self.setup_threads()

        self.MockSched = MockSched(
            base_config=self.base_config, interrupt_sig=self.interrupt_sig
        )

        # ------------------------------------------------------------------
        # temporary hack to be consistent with SchedulerStandalone
        # ------------------------------------------------------------------
        self.external_events = []
        self.external_clock_events = []
        external_generate_clock_events(self)
        external_generate_events(self)

        return
Example #24
0
class InstHealth(ServiceManager):
    """instrument health simulation class, simulating changes to the metrics of instruments

       Only a single active instance is allowed to exist
    """

    lock = Lock()

    # ------------------------------------------------------------------
    def __init__(self, base_config, service_name, interrupt_sig):
        self.class_name = self.__class__.__name__
        service_name = (service_name
                        if service_name is not None else self.class_name)
        super().__init__(service_name=service_name)

        self.log = LogParser(base_config=base_config, title=__name__)
        # self.log.info([['y', ' - InstHealth - ']])

        self.base_config = base_config
        self.site_type = self.base_config.site_type
        self.clock_sim = self.base_config.clock_sim
        self.inst_data = self.base_config.inst_data

        self.service_name = service_name
        self.interrupt_sig = interrupt_sig

        self.redis = RedisManager(name=self.class_name,
                                  base_config=self.base_config,
                                  log=self.log)

        self.tel_ids = self.inst_data.get_inst_ids()

        self.inst_health_s0 = dict()
        self.inst_health_s1 = dict()
        self.inst_health = dict()
        self.inst_health_sub_flat = dict()

        # minimum interval of simulation-time to wait before randomising values
        min_wait_update_sec = 10
        self.check_update_opts = {
            'prev_update': None,
            'min_wait': min_wait_update_sec,
        }

        # the fraction of telescopes to randomly update
        self.update_frac = 0.05

        # set debug_updates to 0 to have mild updates, to 1 to have frequent
        # updates for a single inst, or to 2 to frequently update all instruments
        self.debug_updates = 1

        self.inst_data = self.base_config.inst_data
        self.health_tag = self.inst_data.health_tag

        # sleep duration for thread loops
        self.loop_sleep_sec = 1
        # minimal real-time delay between randomisations (once every self.loop_act_rate sec)
        self.loop_act_rate = max(int(5 / self.loop_sleep_sec), 1)

        self.init()

        # make sure this is the only active instance
        self.init_active_instance()

        self.setup_threads()

        return

    # ------------------------------------------------------------------
    def setup_threads(self):

        self.add_thread(target=self.loop_main)

        return

    # ------------------------------------------------------------------
    def init(self):
        # self.log.info([['g', ' - inst_health.init() ...']])

        pipe = self.redis.get_pipe()

        for id_now in self.tel_ids:
            self.inst_health_s0[id_now] = {
                self.health_tag: 100,
                'status': 'run',
                'camera': 100,
                'mirror': 100,
                'mount': 100,
                'daq': 100,
                'aux': 100
            }

            for key, val in self.inst_health_s0[id_now].items():
                pipe.h_set(
                    name='inst_health_summary;' + str(id_now),
                    key=key,
                    data=val,
                )

            # self.redPipe.hmset('inst_health_s0'+str(id_now), self.inst_health_s0[id_now])

        self.inst_health = self.inst_data.get_inst_healths()

        # derive the top-level properties, eg:
        #   {'inst_0', 'camera', 'prc_0', 'mount', 'mirror',
        #    'prc_1', 'aux', 'inst_1', 'daq'}
        self.rnd_props = set()
        for (k_0, v_0) in self.inst_health.items():
            for (k_1, v_1) in v_0.items():
                self.rnd_props.add(k_1)
        self.rnd_props = list(self.rnd_props)

        # a flat dict with references to each level of the original dict
        self.inst_health_sub_flat = dict()
        for id_now in self.tel_ids:
            self.inst_health_sub_flat[id_now] = flatten_dict(
                self.inst_health[id_now])

        for id_now in self.tel_ids:
            self.set_tel_health_s1(id_now)

            for key, val in self.inst_health_sub_flat[id_now].items():
                if 'val' in val['data']:
                    pipe.h_set(name='inst_health_summary;' + str(id_now),
                               key=key,
                               data=val['data']['val'])

        # set the full health metrics for each instrument
        self.inst_health_deep = self.inst_data.get_inst_health_fulls()
        for (id_now, inst) in self.inst_health_deep.items():
            for (field_id, data) in inst.items():
                pipe.h_set(
                    name='inst_health_deep;' + str(id_now),
                    key=field_id,
                    data=data,
                )

        pipe.execute()

        self.rand_once(update_frac=1)

        return

    # ------------------------------------------------------------------
    def set_tel_health_s1(self, id_now):
        self.inst_health_s1[id_now] = {
            'id': id_now,
            self.health_tag: self.inst_health_s0[id_now][self.health_tag],
            'status': 'run',
            'data': [v for v in self.inst_health[id_now].values()]
        }

        return

    # ------------------------------------------------------------------
    def rand_once(self, rnd_seed=-1, update_frac=None):
        ids = self.rand_s0(rnd_seed=rnd_seed, update_frac=update_frac)
        self.rand_s1(tel_id_in=ids, rnd_seed=rnd_seed)

        return

    # ------------------------------------------------------------------
    def rand_s0(self, rnd_seed=-1, update_frac=None):
        if rnd_seed < 0:
            rnd_seed = random.randint(0, 100000)
        rnd_gen = Random(rnd_seed)

        arr_props = dict()

        if update_frac is None:
            update_frac = self.update_frac

        n_rnd_props = len(self.rnd_props)

        pipe = self.redis.get_pipe()

        for id_now in self.tel_ids:

            # example change of the connection status (negative health value)
            if (id_now in ['Lx02']):
                rnd = rnd_gen.random()
                sign = 1 if rnd < 0.5 else -1

                self.inst_health_s0[id_now][self.health_tag] = (
                    sign * abs(self.inst_health_s0[id_now][self.health_tag]))

                pipe.h_set(
                    name='inst_health_summary;' + str(id_now),
                    key=self.health_tag,
                    data=self.inst_health_s0[id_now][self.health_tag],
                )

            if (self.debug_updates >= 2) or (id_now in ['Lx01']):
                update_frac_now = 1
            else:
                update_frac_now = update_frac

            rnd = rnd_gen.random()
            if self.debug_updates == 0:
                if rnd > update_frac_now:
                    continue
            elif self.debug_updates == 1:
                if (id_now not in ['Lx01']) and (rnd < 0.5):
                    continue
                elif rnd > update_frac_now:
                    continue
            elif self.debug_updates == 2:
                pass

            arr_props[id_now] = self.inst_health_s0[id_now]

            rnd = rnd_gen.random()
            if rnd < 0.06:
                health_tot = rnd_gen.randint(0, 100)
            elif rnd < 0.1:
                health_tot = rnd_gen.randint(40, 100)
            else:
                health_tot = rnd_gen.randint(60, 100)

            if self.debug_updates == 0:
                pass
            elif self.debug_updates == 1:
                if (id_now in ['Lx01']) and rnd < 0.5:
                    health_tot = rnd_gen.randint(0, 100)
            elif self.debug_updates >= 2:
                health_tot = rnd_gen.randint(0, 100)

            arr_props[id_now][self.health_tag] = health_tot

            bad = 100 - health_tot
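            # illustrative example (added for clarity): if health_tot = 85 then
            # bad = 15 is split among the properties below, e.g. one property
            # might lose 9 (set to 91), the next 4 (set to 96), and the last
            # property in rnd_props absorbs the remaining 2 (set to 98)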
            for n_prop_now in range(n_rnd_props):
                rnd = rnd_gen.randint(0, bad)

                if n_prop_now == n_rnd_props - 1:
                    rnd = bad
                else:
                    bad -= rnd

                if self.rnd_props[n_prop_now] in arr_props[id_now]:
                    arr_props[id_now][self.rnd_props[n_prop_now]] = 100 - rnd

            self.inst_health_s0[id_now] = arr_props[id_now]

            self.inst_health_s1[id_now][self.health_tag] = health_tot

            for key, val in self.inst_health_s0[id_now].items():
                pipe.h_set(name='inst_health_summary;' + str(id_now),
                           key=key,
                           data=val)
                # print('inst_health_summary;' + str(id_now), key, val)

        pipe.execute()

        ids = [id_now for id_now in arr_props]

        return ids

    # ------------------------------------------------------------------
    def rand_s1(self, tel_id_in=None, rnd_seed=-1):

        if rnd_seed < 0:
            rnd_seed = random.randint(0, 100000)
        rnd_gen = Random(rnd_seed)

        rnd_props = [p for p in self.rnd_props]

        # recursive randomization of all 'val' values of the
        # dict and its child elements
        def set_rnd_props(data_in):
            keys = data_in.keys()

            if 'children' in keys:
                for child in data_in['children']:
                    set_rnd_props(child)

            if 'val' in keys:
                rnd_val(data_in)

            return

        # in-place randomisation of the 'val' key of an input dict
        def rnd_val(data_in):
            if rnd_gen.random() < 0.1:
                rnd_now = rnd_gen.uniform(-30, 30)
            else:
                rnd_now = rnd_gen.uniform(-10, 10)

            val = data_in['val'] + ceil(rnd_now)
            val = max(-1, min(100, int(val)))
            data_in['val'] = val

            return
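        # illustrative example (added for clarity): set_rnd_props() walks a
        # nested structure such as
        #   {'val': 90, 'children': [{'val': 80}, {'val': 75, 'children': [...]}]}
        # and applies rnd_val() to every 'val' it finds, nudging it by up to
        # +-10 (or +-30 with 10% probability) and clipping it to the range [-1, 100]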

        ids = self.tel_ids if (tel_id_in is None) else tel_id_in

        pipe = self.redis.get_pipe()

        n_rnd_props_0 = 3
        n_rnd_props_1 = 10

        for id_now in self.tel_ids:
            random.shuffle(rnd_props)

            # call the randomization function
            if id_now in ids:
                # randomise the main metrics
                for prop_name in rnd_props:
                    if prop_name not in self.inst_health[id_now]:
                        continue

                    set_rnd_props(self.inst_health[id_now][prop_name])
                    # if id_now == 'Lx01':
                    #     print (id_now, prop_name, '\n', self.inst_health[id_now][prop_name])

                    # sync with the value in self.inst_health_s0
                    prop_value = self.inst_health[id_now][prop_name]['val']
                    self.inst_health_s0[id_now][prop_name] = prop_value
                    pipe.h_set(name='inst_health_summary;' + str(id_now),
                               key=prop_name,
                               data=prop_value)

                # randomise the list of full health
                inst_health_deep = self.redis.h_get_all(
                    name='inst_health_deep;' + str(id_now),
                    default_val={},
                )

                # select a sub sample of properties to randomise
                props_0 = list(inst_health_deep.keys())
                props_0 = random.sample(props_0,
                                        min(n_rnd_props_0,
                                            len(props_0) - 1))

                # for debugging, always randomise this particular property
                if id_now == 'Lx01' and 'camera_1_0' not in props_0:
                    if 'camera_1_0' in inst_health_deep.keys():
                        props_0 += ['camera_1_0']

                for n_prop_0 in range(len(props_0)):
                    prop_0_name = props_0[n_prop_0]
                    props_1 = inst_health_deep[prop_0_name]

                    # select a random sub sample of properties
                    # by picking their indices in the list
                    update_indices = random.sample(
                        range(len(props_1) - 1),
                        min(n_rnd_props_1,
                            len(props_1) - 1),
                    )
                    # randomise in-place the val of the selected properties
                    for n_prop_1 in range(len(update_indices)):
                        prop_1_name = update_indices[n_prop_1]
                        rnd_val(props_1[prop_1_name])

                    pipe.h_set(
                        name='inst_health_deep;' + str(id_now),
                        key=props_0[n_prop_0],
                        data=props_1,
                    )

                pipe.execute()

                self.set_tel_health_s1(id_now)

            time_now_sec = self.clock_sim.get_time_now_sec()
            time_min = self.clock_sim.get_time_series_start_time_sec()

            base_name = 'inst_health_summary;' + str(id_now)

            for key, val in self.inst_health_sub_flat[id_now].items():
                if 'val' in val['data']:
                    pipe.h_set(name=base_name,
                               key=key,
                               data=val['data']['val'])
                    pipe.z_add(name=base_name + ';' + key,
                               score=time_now_sec,
                               data={
                                   'time_sec': time_now_sec,
                                   'value': val['data']['val'],
                               },
                               clip_score=time_min)

            pipe.execute()

        # # self.redis.z_get('inst_health_summary;Lx03;camera_1')
        # data = self.redis.z_get('inst_health_summary;Lx03;camera_1')
        # print('----------->', data)
        # raise Exception('aaaaa aaaaaaaaaa')

        return

    # ------------------------------------------------------------------
    def loop_main(self):
        self.log.info([['g', ' - starting InstHealth.loop_main ...']])
        sleep(0.1)

        n_loop = 0
        rnd_seed = 12564654
        while self.can_loop():
            n_loop += 1
            sleep(self.loop_sleep_sec)
            if n_loop % self.loop_act_rate != 0:
                continue

            need_update = self.clock_sim.need_data_update(
                update_opts=self.check_update_opts, )
            if not need_update:
                continue

            with InstHealth.lock:
                self.rand_once(rnd_seed=rnd_seed)
                rnd_seed += 1

        self.log.info([['c', ' - ending InstHealth.loop_main ...']])

        return
Example #25
0
class Manager():
    """class for running asynchronous services
    """

    # ------------------------------------------------------------------
    def __init__(self):
        self.class_name = self.__class__.__name__

        self.app_name = 'data_manager'
        settings = parse_args(app_name=self.app_name)

        # southern or northern CTA sites have different telescope configurations
        site_type = settings['site_type']
        # the address for the site
        app_host = settings['app_host']
        # local log file location
        log_file = settings['log_file']
        # logging level
        log_level = settings['log_level']
        # the port for the site
        app_port = settings['app_port']
        # the redis port use for this site
        self.redis_port = settings['redis_port']
        # define the prefix for all urls (must be a non-empty string)
        app_prefix = settings['app_prefix']
        # is this a simulation
        is_simulation = settings['is_simulation']
        # development mode
        debug_opts = settings['debug_opts']
        # do we flush redis on startup
        self.do_flush_redis = settings['do_flush_redis']

        # instantiate the general settings class (must come first!)
        self.base_config = BaseConfig(
            site_type=site_type,
            redis_port=self.redis_port,
            app_port=app_port,
            app_prefix=app_prefix,
            app_host=app_host,
            log_level=log_level,
            websocket_route=None,
            allow_panel_sync=None,
            debug_opts=debug_opts,
            is_simulation=is_simulation,
        )

        self.log = LogParser(
            base_config=self.base_config,
            title=__name__,
            log_level=log_level,
            log_file=log_file,
        )

        self.redis = RedisManager(
            name=self.class_name, base_config=self.base_config, log=self.log
        )

        return

    # ------------------------------------------------------------------
    def cleanup_services(self, service_name, is_verb=False):
        """graceful exit of services
        """

        # is_verb = True
        if is_verb:
            self.log.info([
                ['c', ' - Manager.service_cleanup for '],
                ['y', service_name],
                ['b', ' ...'],
            ])

        # service_manager = ServiceManager()
        # service_manager.unset_active_instance(parent=self, class_prefix=service_name)
        ServiceManager.unset_active_instance(parent=self, service_name=service_name)

        # if service_name == 'clock_sim_service':
        #     pass

        return

    # ------------------------------------------------------------------
    def run_service(self, service_name, interrupt_sig):
        """run services in individual processes
        """

        # ------------------------------------------------------------------
        # simple locker test (should be moved to unit tests...)
        check_lock = False
        # check_lock = True
        if check_lock:
            lock_prefix = 'utils;lock;' + 'test' + ';'
            # dynamic lock names, based on the current properties
            lock_namespace = {
                'loop': lambda: 'service_loop',
            }
            # initialise the lock manager
            locker = LockManager(
                log=self.log,
                redis=self.redis,
                base_config=self.base_config,
                lock_namespace=lock_namespace,
                lock_prefix=lock_prefix,
                is_passive=True,
            )

            self.log.info([['o', ' -- trying to acquire locks for '], ['c',
                                                                       service_name]])
            with locker.locks.acquire(names='loop', debug=True, can_exist=False):
                self.log.info([['y', ' --   now i am locked 0 ... '], ['c',
                                                                       service_name]])
                sleep(0.3)
                with locker.locks.acquire(names='loop', debug=True, can_exist=True):
                    self.log.info([['y', '   -- now i am locked 1 ... '],
                                   ['c', service_name]])
                    sleep(0.2)
            self.log.info([['o', ' -- released locks for '], ['c', service_name]])
        # ------------------------------------------------------------------

        # in case of backlog from a previous reload, call the cleanup for good measure
        self.cleanup_services(service_name)

        self.log.info([
            ['g', ' - starting services for '],
            ['y', service_name],
            ['g', ' \t(pid: '],
            ['p', os.getpid()],
            ['g', ') ...'],
        ])

        # set the list of telescopes for this particular site and attach it to base_config
        InstData(base_config=self.base_config)

        # set a passive instance of the clock class, to add its access functions to base_config
        ClockSim(
            base_config=self.base_config,
            interrupt_sig=interrupt_sig,
            is_passive=True,
        )

        # for debugging, override the global flag
        # self.do_flush_redis = True
        if service_name == 'redis_flush':
            if self.do_flush_redis:
                self.log.warn([['bb', ' --- flushing redis --- ']])
                self.redis.flush()

        elif service_name == 'redis_services':
            # prefix for all lock names in redis
            lock_prefix = 'utils;lock;' + service_name + ';'
            # dynamic lock names, based on the current properties
            lock_namespace = {
                'loop': lambda: 'service_loop',
            }

            LockManager(
                log=self.log,
                redis=self.redis,
                base_config=self.base_config,
                lock_namespace=lock_namespace,
                lock_prefix=lock_prefix,
                is_passive=False,
                interrupt_sig=interrupt_sig,
                service_name=service_name,
            )

        # ------------------------------------------------------------------
        # ------------------------------------------------------------------
        elif service_name == 'time_of_night_service':
            # start the time_of_night clock (to be phased out....)
            utils.time_of_night(
                base_config=self.base_config,
                service_name=service_name,
                interrupt_sig=interrupt_sig,
            )
        # ------------------------------------------------------------------
        # ------------------------------------------------------------------

        elif service_name == 'clock_sim_service':
            ClockSim(
                base_config=self.base_config,
                interrupt_sig=interrupt_sig,
                is_passive=False,
            )

        elif service_name == 'inst_health_service':
            InstHealth(
                base_config=self.base_config,
                service_name=service_name,
                interrupt_sig=interrupt_sig,
            )

        elif service_name == 'inst_pos_service':
            InstPos(
                base_config=self.base_config,
                service_name=service_name,
                interrupt_sig=interrupt_sig,
            )

        elif service_name == 'scheduler_service':
            if has_acs:
                raise Exception(
                    'threading has not been properly updated for the acs version....'
                )
                SchedulerACS(
                    base_config=self.base_config,
                    service_name=service_name,
                    interrupt_sig=interrupt_sig,
                )
            else:
                MockTarget(base_config=self.base_config)
                SchedulerStandalone(
                    base_config=self.base_config,
                    service_name=service_name,
                    interrupt_sig=interrupt_sig,
                )

        else:
            raise Exception('unknown service_name ?!?', service_name)

        # all service classes inherit from ServiceManager, which keeps track of
        # all threads. after initialising all classes, start the threads (blocking action)
        ServiceManager.run_threads()

        # after interrupt_sig has released the block from outside
        # of this process, do some cleanup
        self.cleanup_services(service_name)

        return
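
A minimal sketch (an assumption, not shown in the snippet above) of how Manager.run_service() could be launched once per service, each in its own process, sharing a single multiprocessing.Event as interrupt_sig; the service names are the ones dispatched inside run_service() above, and launch_all_services() itself is a hypothetical helper.

import multiprocessing


def launch_all_services(manager):
    # one shared event, set from outside to unblock ServiceManager.run_threads()
    interrupt_sig = multiprocessing.Event()

    service_names = [
        'redis_flush',
        'redis_services',
        'time_of_night_service',
        'clock_sim_service',
        'inst_health_service',
        'inst_pos_service',
        'scheduler_service',
    ]

    procs = []
    for service_name in service_names:
        proc = multiprocessing.Process(
            target=manager.run_service,
            kwargs={'service_name': service_name, 'interrupt_sig': interrupt_sig},
        )
        proc.start()
        procs.append(proc)

    try:
        for proc in procs:
            proc.join()
    except KeyboardInterrupt:
        # signal every service loop to stop, then wait for a clean exit
        interrupt_sig.set()
        for proc in procs:
            proc.join()

    return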
Example #26
0
class SchedulerStandalone(ServiceManager):
    """scheduler simulation class, simulating the execution of scheduling blocks

       Only a single active instance is allowed to exist
    """

    lock = Lock()

    # ------------------------------------------------------------------
    def __init__(self, base_config, service_name, interrupt_sig):
        self.class_name = self.__class__.__name__
        service_name = (service_name if service_name is not None else self.class_name)
        super().__init__(service_name=service_name)

        self.log = LogParser(base_config=base_config, title=__name__)
        self.log.info([['g', ' - starting SchedulerStandalone ...']])

        self.base_config = base_config
        self.site_type = self.base_config.site_type
        self.clock_sim = self.base_config.clock_sim
        self.inst_data = self.base_config.inst_data

        self.service_name = service_name
        self.interrupt_sig = interrupt_sig

        self.tel_ids = self.inst_data.get_inst_ids(inst_types=['LST', 'MST', 'SST'])
        self.sub_array_insts = self.inst_data.get_sub_array_insts()

        self.no_sub_arr_name = self.base_config.no_sub_arr_name

        self.redis = RedisManager(
            name=self.class_name, base_config=self.base_config, log=self.log
        )

        self.debug = False
        self.expire_sec = 86400 * 2  # two days
        # self.expire_sec = 5

        # self.max_n_obs_block = 4 if self.site_type == 'N' else 7
        # self.max_n_obs_block = min(self.max_n_obs_block, floor(len(self.tel_ids) / 4))

        # sleep duration for thread loops
        self.loop_sleep_sec = 1
        # minimal real-time delay between randomisations (once every self.loop_act_rate sec)
        self.loop_act_rate = max(int(2 / self.loop_sleep_sec), 1)

        self.max_n_cycles = 100
        self.min_n_sched_block = 2  # 2
        self.max_n_sched_block = 5  # 5
        self.min_n_obs_block = 1
        self.max_n_obs_block = 5
        self.min_n_tel_block = 4
        self.max_n_free_tels = 5

        self.name_prefix = get_rnd(n_digits=5, out_type=str)

        self.az_min_max = [-180, 180]
        self.zen_min_max_tel = [0, 70]
        self.zen_min_max_pnt = [0, 20]

        self.phases_exe = {
            'start': [
                'run_config_mount', 'run_config_camera', 'run_config_DAQ',
                'run_config_mirror'
            ],
            'during': ['run_take_data'],
            'finish': ['run_finish_mount', 'run_finish_camera', 'run_finish_cleanup'],
        }

        self.error_rnd_frac = {
            'E1': 0.3,
            'E2': 0.4,
            'E3': 0.5,
            'E4': 0.6,
            'E5': 0.7,
            'E6': 0.8,
            'E7': 0.9,
            'E8': 1,
        }

        self.phase_rnd_frac = {
            'start': 0.29,
            'finish': 0.1,
            'cancel': 0.06,
            'fail': 0.1,
        }

        # 1800 = 30 minutes
        self.obs_block_sec = 1800

        self.n_init_cycle = -1
        self.n_nights = -1

        self.update_name = 'obs_block_update'
        self.sched_block_prefix = 'sched_block_'
        self.obs_block_prefix = 'obs_block_'

        rnd_seed = get_rnd_seed()
        self.rnd_gen = Random(rnd_seed)

        self.external_clock_events = []
        external_generate_clock_events(self)

        self.redis.delete(self.update_name)

        self.init()

        # make sure this is the only active instance
        self.init_active_instance()

        self.setup_threads()

        return

    # ------------------------------------------------------------------
    def setup_threads(self):

        self.add_thread(target=self.loop_main)

        return

    # ------------------------------------------------------------------
    def init(self):
        debug_tmp = False
        # debug_tmp = True

        self.exe_phase = dict()
        self.all_obs_blocks = []
        self.external_events = []

        self.n_nights = self.clock_sim.get_n_nights()
        night_start_sec = self.clock_sim.get_night_start_sec()
        night_end_sec = self.clock_sim.get_night_end_sec()
        night_duration_sec = self.clock_sim.get_night_duration_sec()

        self.n_init_cycle += 1

        is_cycle_done = False
        n_cycle_now = 0
        n_sched_block = -1
        overhead_sec = self.obs_block_sec * 0.05

        tot_sched_duration_sec = night_start_sec
        max_block_duration_sec = night_end_sec - self.obs_block_sec

        pipe = self.redis.get_pipe()

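        # generate cycles of sched blocks until the night is filled or the
        # maximal number of cycles is reached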
        while True:
            can_break = (
                tot_sched_duration_sec >= max_block_duration_sec
                or n_cycle_now >= self.max_n_cycles or is_cycle_done
            )
            if can_break:
                break

            base_cycle_name = (
                self.name_prefix + '_' + str(self.n_init_cycle) + '_' + str(n_cycle_now)
                + '_'
            )
            n_cycle_now += 1

            # derive a random combination of sub-arrays which do not
            # conflict with each other
            sub_array_ids = list(self.sub_array_insts.keys())
            n_sa_0 = self.rnd_gen.randint(0, len(sub_array_ids) - 1)
            sa_id_0 = sub_array_ids[n_sa_0]

            allowed_sa_ids = self.inst_data.get_allowed_sub_arrays(sa_id_0)

            sa_ids = [sa_id_0]
            while len(allowed_sa_ids) > 0:
                # select a random id from the allowed list of the initial sa
                check_n_sa = self.rnd_gen.randint(0, len(allowed_sa_ids) - 1)
                sa_id_add = allowed_sa_ids[check_n_sa]
                allowed_sa_ids.remove(sa_id_add)

                # check if this id is allowed by all included sas
                check_sa_ids = []
                for sa_id in sa_ids:
                    check_sa_ids_now = self.inst_data.get_allowed_sub_arrays(sa_id)
                    check_sa_ids += [int(sa_id_add in check_sa_ids_now)]

                # add the new sa if it is allowed by all
                if sum(check_sa_ids) == len(check_sa_ids):
                    sa_ids += [sa_id_add]

            if debug_tmp:
                percent = (tot_sched_duration_sec - night_start_sec) / night_duration_sec
                print()
                print('-' * 100)
                print(
                    ' -    n_nights/n_cycle_now',
                    [self.n_nights, n_cycle_now],
                    'tot_sched_duration_sec / percentage:',
                    [tot_sched_duration_sec, int(100 * percent)],
                )

            sched_block_duration_sec = []

            # for n_sched_block_now in range(n_cycle_sched_blocks):
            for n_sched_block_now in range(len(sa_ids)):
                sched_block_id = (
                    self.sched_block_prefix + base_cycle_name + str(n_sched_block_now)
                )

                n_sched_block += 1

                sa_id = sa_ids[n_sched_block_now]
                tel_ids = self.sub_array_insts[sa_id]
                n_tel_now = len(tel_ids)

                if debug_tmp:
                    print(' --   sub-array:', sa_id, '\n', ' ' * 15, tel_ids)

                # choose the number of obs blocks inside these blocks
                n_obs_blocks = self.rnd_gen.randint(
                    self.min_n_obs_block, self.max_n_obs_block
                )

                if debug_tmp:
                    print(
                        ' ---  n_sched_block:', n_sched_block,
                        ' ---  n_sched_block_now / n_tel_now:', n_sched_block_now,
                        n_tel_now, '-------', sched_block_id
                    )

                tot_obs_block_duration_sec = 0
                block_duration_sec = tot_sched_duration_sec

                targets = get_rnd_targets(
                    self=self,
                    night_duration_sec=night_duration_sec,
                    block_duration_sec=block_duration_sec,
                )

                for n_obs_now in range(n_obs_blocks):
                    obs_block_id = (
                        self.obs_block_prefix + base_cycle_name + str(n_sched_block_now)
                        + '_' + str(n_obs_now)
                    )

                    obs_block_name = (str(n_sched_block) + ' (' + str(n_obs_now) + ')')

                    self.exe_phase[obs_block_id] = ''

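                    # randomly shorten some obs blocks to add variety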
                    rnd = self.rnd_gen.random()
                    obs_block_sec = self.obs_block_sec
                    if rnd < 0.05:
                        obs_block_sec /= 1.8
                    elif rnd < 0.3:
                        obs_block_sec /= 1.5
                    elif rnd < 0.5:
                        obs_block_sec /= 1.1
                    obs_block_sec = int(floor(obs_block_sec))

                    planned_block_end_sec = block_duration_sec + obs_block_sec
                    is_cycle_done = (planned_block_end_sec > night_end_sec)
                    if is_cycle_done:
                        if debug_tmp:
                            print(
                                ' - is_cycle_done - ',
                                'n_obs_now / start_time_sec / duration:', n_obs_now,
                                block_duration_sec, obs_block_sec
                            )
                        break

                    # integrated time for all obs blocks within this sched block
                    tot_obs_block_duration_sec += obs_block_sec

                    pointings = get_rnd_pointings(
                        self=self,
                        tel_ids=tel_ids,
                        targets=targets,
                        sched_block_id=sched_block_id,
                        obs_block_id=obs_block_id,
                        n_obs_now=n_obs_now,
                    )

                    if debug_tmp:
                        print(
                            ' ---- n_obs_now / start_time_sec / duration:',
                            n_obs_now,
                            block_duration_sec,
                            obs_block_sec,
                            '-------',
                            obs_block_id,
                        )

                    time = {
                        'start': block_duration_sec,
                        'duration': obs_block_sec - overhead_sec,
                    }
                    time['end'] = time['start'] + time['duration']

                    exe_state = {'state': 'wait', 'can_run': True}

                    metadata = {
                        'n_sched': n_sched_block,
                        'n_obs': n_obs_now,
                        'block_name': obs_block_name
                    }

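                    # group the telescopes of this sub-array by size class;
                    # 'min' requires at least half of each class, 'max' is a
                    # fixed upper bound per class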
                    telescopes = {
                        'large': {
                            'min':
                            int(len(list(filter(lambda x: 'L' in x, tel_ids))) / 2),
                            'max': 4,
                            'ids': list(filter(lambda x: 'L' in x, tel_ids))
                        },
                        'medium': {
                            'min':
                            int(len(list(filter(lambda x: 'M' in x, tel_ids))) / 2),
                            'max': 25,
                            'ids': list(filter(lambda x: 'M' in x, tel_ids))
                        },
                        'small': {
                            'min':
                            int(len(list(filter(lambda x: 'S' in x, tel_ids))) / 2),
                            'max': 70,
                            'ids': list(filter(lambda x: 'S' in x, tel_ids))
                        }
                    }

                    block = {
                        'sched_block_id': sched_block_id,
                        'obs_block_id': obs_block_id,
                        'time': time,
                        'metadata': metadata,
                        'timestamp': get_time('msec'),
                        'telescopes': telescopes,
                        'exe_state': exe_state,
                        'run_phase': [],
                        'targets': targets,
                        'pointings': pointings,
                        'tel_ids': tel_ids,
                    }

                    pipe.set(
                        name=block['obs_block_id'],
                        data=block,
                        expire_sec=self.expire_sec
                    )

                    self.all_obs_blocks.append(block)

                    block_duration_sec += obs_block_sec

                # collect the durations of all sched blocks within this cycle
                if tot_obs_block_duration_sec > 0:
                    sched_block_duration_sec += [tot_obs_block_duration_sec]

            # the maximal duration of all blocks within this cycle (default=0
            # guards against a cycle in which no obs block could be scheduled)
            tot_sched_duration_sec += max(sched_block_duration_sec, default=0)

        pipe.set(name='external_events', data=self.external_events)
        pipe.set(name='external_clock_events', data=self.external_clock_events)

        pipe.execute()

        self.update_exe_statuses()

        return

    # ------------------------------------------------------------------
    def get_obs_block_template(self):
        """temporary hardcoded dict......
        """

        # generated with:
        #   print jsonAcs.encode(jsonAcs.classFactory.defaultValues[sb.ObservationBlock])

        template = {
            'py/object': 'sb.ObservationBlock',
            'src': {
                'py/object': 'sb.Source',
                'proposal_priority': {
                    'py/object': 'sb.High'
                },
                'proposal_type': {
                    'py/object': 'sb.placeholder'
                },
                'region_of_interest': {
                    'py/object': 'sb.RegionOfInterest',
                    'circle_radius': 100
                },
                'coords': {
                    'py/object': 'sb.Coordinates',
                    'equatorial': {
                        'py/object': 'sb.EquatorialCoordinates',
                        'dec': 4,
                        'ra': 2
                    }
                },
                'id': 'source',
                'observing_mode': {
                    'py/object': 'sb.ObservingMode',
                    'slewing_': {
                        'py/object': 'sb.Slewing',
                        'take_data': 1
                    },
                    'observing_type': {
                        'py/object': 'sb.ObservingType',
                        'wobble_': {
                            'py/object': 'sb.Wobble',
                            'angle': 1,
                            'offset': 1
                        }
                    }
                }
            },
            'observing_conditions': {
                'py/object': 'sb.ObservingConditions',
                'quality_': {
                    'py/object': 'sb.Quality',
                    'illumination': 1,
                    'min_nsb_range': 1,
                    'max_nsb_range': 1
                },
                'start_time_sec': {
                    'py/object': 'sb.DateTime',
                    'placeholder': 1
                },
                'weather_': {
                    'py/object': 'sb.Weather',
                    'wind_speed': 1,
                    'precision_pointing': 1,
                    'cloudiness': 1,
                    'humidity': 1
                },
                'duration': 0,
                'tolerance': 1
            },
            'max_script_duration': 0,
            'script_id': 'script_id',
            'id': 'ob_id'
        }

        return template

    # ------------------------------------------------------------------
    def wait_to_run(self):
        """move one from wait to run
        """

        # time_now_sec = self.time_of_night.get_current_time()
        time_now_sec = self.clock_sim.get_time_now_sec()

        # move to run state
        wait_blocks = [
            x for x in self.all_obs_blocks if (x['exe_state']['state'] == 'wait')
        ]

        pipe = self.redis.get_pipe()

        has_change = False
        for block in wait_blocks:
            time_comp = (
                block['time']['start'] - (self.loop_sleep_sec * self.loop_act_rate)
            )
            if time_now_sec < time_comp:
                # datetime.strptime(block['start_time_sec'], '%Y-%m-%d %H:%M:%S'):
                # - deltatime((self.loop_sleep_sec * self.loop_act_rate))
                continue

            block['exe_state']['state'] = 'run'

            self.exe_phase[block['obs_block_id']] = 'start'
            block['run_phase'] = copy.deepcopy(self.phases_exe['start'])

            has_change = True
            pipe.set(name=block['obs_block_id'], data=block, expire_sec=self.expire_sec)

        if has_change:
            pipe.execute()

            # check for blocks which can't begin, as their scheduled time has already passed
            wait_blocks = [
                x for x in self.all_obs_blocks if x['exe_state']['state'] == 'wait'
            ]

            has_change = False
            for block in wait_blocks:
                # # adjust the starting/ending time
                # block['end_time_sec'] = block['start_time_sec'] + block['duration']

                is_over_time = time_now_sec >= block['time']['end']
                is_rnd_stop = (
                    self.rnd_gen.random() < self.phase_rnd_frac['cancel'] * 0.1
                )
                if is_over_time or is_rnd_stop:

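                    # cancel the block and attach an error code drawn from the
                    # ordered probability ladder (first matching draw wins)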
                    block['exe_state']['state'] = 'cancel'
                    if self.rnd_gen.random() < self.error_rnd_frac['E1']:
                        block['exe_state']['error'] = 'E1'
                    elif self.rnd_gen.random() < self.error_rnd_frac['E2']:
                        block['exe_state']['error'] = 'E2'
                    elif self.rnd_gen.random() < self.error_rnd_frac['E3']:
                        block['exe_state']['error'] = 'E3'
                    elif self.rnd_gen.random() < self.error_rnd_frac['E4']:
                        block['exe_state']['error'] = 'E4'
                    elif self.rnd_gen.random() < self.error_rnd_frac['E8']:
                        block['exe_state']['error'] = 'E8'

                    block['exe_state']['can_run'] = False

                    block['run_phase'] = []

                    self.exe_phase[block['obs_block_id']] = ''

                    has_change = True
                    pipe.set(
                        name=block['obs_block_id'],
                        data=block,
                        expire_sec=self.expire_sec,
                    )

            if has_change:
                pipe.execute()

        return

    # ------------------------------------------------------------------
    def run_phases(self):
        """progress run phases
        """

        time_now_sec = self.clock_sim.get_time_now_sec()

        runs = [x for x in self.all_obs_blocks if (x['exe_state']['state'] == 'run')]

        pipe = self.redis.get_pipe()

        has_change = False
        for block in runs:
            phase = self.exe_phase[block['obs_block_id']]
            if phase == '':
                continue

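            # remove each pending task of the current stage once its
            # completion condition is met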
            for phase_now in self.phases_exe[phase]:
                if phase_now in block['run_phase']:

                    if phase_now in self.phases_exe['start']:
                        is_done = (self.rnd_gen.random() < self.phase_rnd_frac['start'])
                        # if is_done:
                        #   block['end_time_sec'] = block['start_time_sec'] + block['duration']

                    elif phase_now in self.phases_exe['during']:
                        is_done = (
                            time_now_sec >= (
                                block['time']['end'] -
                                block['time']['duration'] * self.phase_rnd_frac['finish']
                            )
                        )  # (datetime.strptime(block['end_time_sec'], '%Y-%m-%d %H:%M:%S') - timedelta(seconds = int(block['duration']) * self.phase_rnd_frac['finish'])))

                    else:
                        is_done = (
                            time_now_sec >= block['time']['end']
                        )  # is_done = (time_now_sec >= datetime.strptime(block['end_time_sec'], '%Y-%m-%d %H:%M:%S'))

                    if is_done:
                        block['run_phase'].remove(phase_now)
                    # print is_done,block['run_phase']

            if len(block['run_phase']) == 0:
                next_phase = ''
                if phase == 'start':
                    next_phase = 'during'
                elif phase == 'during':
                    next_phase = 'finish'

                if next_phase in self.phases_exe:
                    block['run_phase'] = copy.deepcopy(self.phases_exe[next_phase])

                self.exe_phase[block['obs_block_id']] = next_phase

            has_change = True
            pipe.set(name=block['obs_block_id'], data=block, expire_sec=self.expire_sec)

        if has_change:
            pipe.execute()

        return

    # ------------------------------------------------------------------
    def run_to_done(self):
        """move one from run to done
        """

        # time_now_sec = self.time_of_night.get_current_time()
        time_now_sec = self.clock_sim.get_time_now_sec()

        runs = [x for x in self.all_obs_blocks if x['exe_state']['state'] == 'run']

        pipe = self.redis.get_pipe()

        has_change = False
        for block in runs:
            if time_now_sec < block['time']['end']:
                continue

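            # pick the final state: a small chance to cancel or fail (with an
            # attached error code), otherwise mark the block as done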
            if self.rnd_gen.random() < self.phase_rnd_frac['cancel']:
                block['exe_state']['state'] = 'cancel'
                if self.rnd_gen.random() < self.error_rnd_frac['E1']:
                    block['exe_state']['error'] = 'E1'
                elif self.rnd_gen.random() < self.error_rnd_frac['E2']:
                    block['exe_state']['error'] = 'E2'
                elif self.rnd_gen.random() < self.error_rnd_frac['E3']:
                    block['exe_state']['error'] = 'E3'
                elif self.rnd_gen.random() < self.error_rnd_frac['E4']:
                    block['exe_state']['error'] = 'E4'
                elif self.rnd_gen.random() < self.error_rnd_frac['E8']:
                    block['exe_state']['error'] = 'E8'

            elif self.rnd_gen.random() < self.phase_rnd_frac['fail']:
                block['exe_state']['state'] = 'fail'
                if self.rnd_gen.random() < self.error_rnd_frac['E1']:
                    block['exe_state']['error'] = 'E1'
                elif self.rnd_gen.random() < self.error_rnd_frac['E2']:
                    block['exe_state']['error'] = 'E2'
                elif self.rnd_gen.random() < self.error_rnd_frac['E3']:
                    block['exe_state']['error'] = 'E3'
                elif self.rnd_gen.random() < self.error_rnd_frac['E4']:
                    block['exe_state']['error'] = 'E4'
                elif self.rnd_gen.random() < self.error_rnd_frac['E8']:
                    block['exe_state']['error'] = 'E8'

            else:
                block['exe_state']['state'] = 'done'

            block['run_phase'] = []

            has_change = True
            pipe.set(name=block['obs_block_id'], data=block, expire_sec=self.expire_sec)

            self.exe_phase[block['obs_block_id']] = ''

        if has_change:
            pipe.execute()

        return

    # ------------------------------------------------------------------
    def update_exe_statuses(self):
        """update the exeStatus lists in redis
        """

        blocks_run = []
        obs_block_ids = {'wait': [], 'run': [], 'done': [], 'cancel': [], 'fail': []}

        pipe = self.redis.get_pipe()

        for block in self.all_obs_blocks:
            obs_block_id = block['obs_block_id']
            exe_state = block['exe_state']['state']

            if self.redis.exists(obs_block_id):
                obs_block_ids[exe_state].append(obs_block_id)

                if exe_state == 'run':
                    blocks_run += [block]

        for key, val in obs_block_ids.items():
            pipe.set(name='obs_block_ids_' + key, data=val)

        pipe.execute()

        update_sub_arrs(self=self, blocks=blocks_run)

        return

    # # ------------------------------------------------------------------
    # def update_sub_arrs(self, blocks=None):
    #     pipe = self.redis.get_pipe()
    #     if blocks is None:
    #         obs_block_ids = self.redis.get(
    #             name=('obs_block_ids_' + 'run'), default_val=[]
    #         )
    #         for obs_block_id in obs_block_ids:
    #             pipe.get(obs_block_id)

    #         blocks = pipe.execute()

    #     #
    #     sub_arrs = []
    #     all_tel_ids = copy.deepcopy(self.tel_ids)

    #     for n_block in range(len(blocks)):
    #         block_tel_ids = (
    #             blocks[n_block]['telescopes']['large']['ids']
    #             + blocks[n_block]['telescopes']['medium']['ids']
    #             + blocks[n_block]['telescopes']['small']['ids']
    #         )
    #         pnt_id = blocks[n_block]['pointings'][0]['id']
    #         pointing_name = blocks[n_block]['pointings'][0]['name']

    #         # compile the telescope list for this block
    #         tels = []
    #         for id_now in block_tel_ids:
    #             tels.append({'id': id_now})

    #             if id_now in all_tel_ids:
    #                 all_tel_ids.remove(id_now)

    #         # add the telescope list for this block
    #         sub_arrs.append({'id': pnt_id, 'N': pointing_name, 'children': tels})

    #     # ------------------------------------------------------------------
    #     # now take care of all free telescopes
    #     # ------------------------------------------------------------------
    #     tels = []
    #     for id_now in all_tel_ids:
    #         tels.append({'id': id_now})

    #     sub_arrs.append({'id': self.no_sub_arr_name, 'children': tels})

    #     # ------------------------------------------------------------------
    #     # for now - a simple/stupid solution, where we write the sub-arrays and publish each
    #     # time, even if the content is actually the same ...
    #     # ------------------------------------------------------------------
    #     self.redis.set(name='sub_arrs', data=sub_arrs)
    #     self.redis.publish(channel='sub_arrs')

    #     return

    # ------------------------------------------------------------------
    def external_add_new_redis_blocks(self):
        obs_block_update = self.redis.get(self.update_name, default_val=None)
        if obs_block_update is None:
            return

        pipe = self.redis.get_pipe()

        # for key in self.all_obs_blocks[0]:
        #     self.log.info([['g', key, self.all_obs_blocks[0][key]]])

        # self.log.info([['g', obs_block_update]])
        self.log.info([['g', len(obs_block_update), len(self.all_obs_blocks)]])

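        # merge externally pushed obs-block updates into the local list and
        # into redis, skipping blocks which have already finished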
        total = 0
        for n_block in range(len(obs_block_update)):
            if self.redis.exists(obs_block_update[n_block]['obs_block_id']):
                # for x in self.all_obs_blocks:
                #     if x['obs_block_id'] == obs_block_update[n_block]['obs_block_id']:
                #         current = [x][0]

                current = [
                    x for x in self.all_obs_blocks
                    if x['obs_block_id'] == obs_block_update[n_block]['obs_block_id']
                ]
                if len(current) == 0:
                    current = obs_block_update[n_block]
                    self.all_obs_blocks.append(current)
                    # for key in obs_block_update[n_block]:
                    #     self.log.info([['g', key, obs_block_update[n_block][key]]])
                else:
                    current = current[0]
                if current['exe_state']['state'] not in ['wait', 'run']:
                    continue

                total += 1

                pipe.set(
                    name=obs_block_update[n_block]['obs_block_id'],
                    data=obs_block_update[n_block],
                    expire_sec=self.expire_sec,
                )
                current = obs_block_update[n_block]

            else:
                self.all_obs_blocks.append(obs_block_update[n_block])
                pipe.set(
                    name=obs_block_update[n_block]['obs_block_id'],
                    data=obs_block_update[n_block],
                    expire_sec=self.expire_sec,
                )

        self.update_exe_statuses()
        # for block in self.all_obs_blocks:
        #     exe_state = block['exe_state']['state']
        #     self.log.info([['g', block['metadata']['block_name'] + ' ' + exe_state]])

        pipe.delete(self.update_name)
        pipe.execute()

        self.log.info([['g', total, len(obs_block_update), len(self.all_obs_blocks)]])

        return

    # ------------------------------------------------------------------
    def loop_main(self):
        self.log.info([['g', ' - starting SchedulerStandalone.loop_main ...']])
        sleep(0.1)

        n_loop = 0
        while self.can_loop():
            n_loop += 1
            sleep(self.loop_sleep_sec)
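            # only act once every loop_act_rate iterations of the sleep loop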
            if n_loop % self.loop_act_rate != 0:
                continue

            with SchedulerStandalone.lock:
                if self.n_nights < self.clock_sim.get_n_nights():
                    self.init()
                else:
                    self.external_add_new_redis_blocks()
                    wait_blocks = [
                        x for x in self.all_obs_blocks
                        if (x['exe_state']['state'] == 'wait')
                    ]
                    runs = [
                        x for x in self.all_obs_blocks
                        if (x['exe_state']['state'] == 'run')
                    ]

                    if len(wait_blocks) + len(runs) == 0:
                        self.init()
                    else:
                        self.wait_to_run()
                        self.run_phases()
                        self.run_to_done()
                        external_generate_events(self)

                self.update_exe_statuses()

        self.log.info([['c', ' - ending SchedulerStandalone.loop_main ...']])

        return
Example #27
0
class BaseWidget():
    # all session ids for this user/widget
    widget_group_sess = dict()

    # ------------------------------------------------------------------
    def __init__(self, widget_id=None, sm=None, *args, **kwargs):
        self.log = LogParser(base_config=sm.base_config, title=__name__)

        # the parent of this widget
        self.sm = sm
        # the shared basic configuration class
        self.base_config = self.sm.base_config
        self.sess_id = self.sm.sess_id
        self.user_id = self.sm.user_id

        # the id of this instance
        self.widget_id = widget_id
        # widget-class and widget group names
        self.widget_type = self.__class__.__name__
        # for common threading
        self.widget_group = self.sm.user_group_id + '_' + self.widget_type

        # redis interface
        self.redis = RedisManager(
            name=self.widget_type, base_config=self.base_config, log=self.log
        )

        # turn on periodic data updates
        self.do_data_updates = True
        # some extra logging messages for this module
        self.log_send_packet = False

        # fixed or dynamic icon
        self.n_icon = None
        # self.n_icon = -1
        # self.icon_id = -1

        # list of utility classes to loop over
        self.my_utils = dict()

        # arguments given to the setup function, to later be
        # passed to utils if needed
        self.setup_args = None

        return

    # ------------------------------------------------------------------
    async def setup(self, *args):
        self.setup_args = args

        widget_info = await self.sm.get_lazy_widget_info(
            sess_id=self.sess_id,
            widget_id=self.widget_id,
        )
        if widget_info is None:
            return

        if self.n_icon is None:
            self.n_icon = widget_info['n_icon']
            # self.icon_id = widget_info['icon_id']

        # override the global logging variable with a
        # name corresponding to the current session id
        self.log = LogParser(
            base_config=self.base_config,
            title=(
                str(self.user_id) + '/' + str(self.sm.sess_id) + '/' + __name__ + '/'
                + self.widget_id
            ),
        )

        return

    # ------------------------------------------------------------------
    async def util_setup(self, data):
        """load a utility class and set it up
        """

        util_id = data['util_id']
        util_type = data['util_type']

        self.log.debug([
            ['b', ' - util_setup: '],
            ['y', util_type],
            ['b', ' with '],
            ['y', util_id],
            ['b', ' to '],
            ['o', self.widget_id],
        ])

        widget_info = await self.sm.get_lazy_widget_info(
            sess_id=self.sess_id,
            widget_id=self.widget_id,
        )
        if widget_info is None:
            return

        if util_id not in widget_info['util_ids']:
            widget_info['util_ids'] += [util_id]

        # dynamic loading of the module
        util_source = self.sm.util_module_dir + '.' + util_type
        util_module = importlib.import_module(util_source, package=None)
        util_cls = getattr(util_module, util_type)

        # instantiate the util class
        self.my_utils[util_id] = util_cls(util_id=util_id, parent=self)

        # add a lock for initialisation. needed in case of restoration
        # of sessions, in order to make sure the initialisation
        # method is called before any other
        expire_sec = self.my_utils[util_id].sm.get_expite_sec(name='widget_init_expire')

        self.my_utils[util_id].locker.semaphores.add(
            name=self.get_util_lock_name(util_id),
            key=util_id,
            expire_sec=expire_sec,
        )

        # run the setup function of the util
        await self.my_utils[util_id].setup(self.setup_args)

        self.redis.h_set(name='ws;widget_info', key=self.widget_id, data=widget_info)

        return

    # ------------------------------------------------------------------
    async def util_func(self, data):
        """execute util methods following client events
        """

        util_id = data['util_id']
        method_name = data['method_name']
        method_args = data['method_args'] if 'method_args' in data else dict()

        # is this the initialisation method
        is_init_func = (method_name == 'util_init')

        # in case this is the first call, the util should be loaded
        if util_id not in self.my_utils:
            await self.util_setup(data)

            # if the first call is not the initialisation method, this is probably
            # a restored session. in this case we ask the client to re-send the
            # initialisation request; the current call will be blocked by the
            # initialisation lock until initialisation completes
            if not is_init_func:
                opt_in = {
                    'widget': self,
                    'event_name': ('ask_init_util;' + util_id),
                }
                await self.sm.emit_widget_event(opt_in=opt_in)

        # block non-initialisation calls if initialisation has not finished yet
        if not is_init_func:
            max_lock_sec = self.sm.get_expite_sec(
                name='widget_init_expire',
                is_lock_check=True,
            )
            await self.my_utils[util_id].locker.semaphores.async_block(
                is_locked=await self.is_util_init_locked(util_id),
                max_lock_sec=max_lock_sec,
            )

        # execute the requested method
        init_func = getattr(self.my_utils[util_id], method_name)
        await init_func(method_args)

        # remove the initialisation lock
        if is_init_func:
            self.my_utils[util_id].locker.semaphores.remove(
                name=self.get_util_lock_name(util_id),
                key=util_id,
            )

        return

    # ------------------------------------------------------------------
    def get_util_lock_name(self, util_id):
        """a unique name for initialisation locking
        """

        lock_name = (
            'ws;base_widget;util_func;' + self.my_utils[util_id].class_name + ';'
            + self.my_utils[util_id].util_id
        )

        return lock_name

    # ------------------------------------------------------------------
    async def is_util_init_locked(self, util_id):
        """a function which checks if the initialisation is locked
        """
        async def is_locked():
            locked = self.my_utils[util_id].locker.semaphores.check(
                name=self.get_util_lock_name(util_id),
                key=util_id,
            )
            return locked

        return is_locked

    # ------------------------------------------------------------------
    async def back_from_offline(self, *args):
        """interface function for back-from-offline events
        """

        # check if any util is missing (e.g., in case we are back
        # after a session recovery) and ask the client to respond

        widget_info = self.redis.h_get(name='ws;widget_info', key=self.widget_id)
        util_ids = widget_info['util_ids']
        util_ids = [u for u in util_ids if u not in self.my_utils.keys()]

        for util_id in util_ids:
            opt_in = {
                'widget': self,
                'event_name': ('ask_init_util;' + util_id),
            }
            await self.sm.emit_widget_event(opt_in=opt_in)

        # loop over utils
        for util_now in self.my_utils.values():
            await util_now.back_from_offline(args)

        return