Example #1
from queue import SimpleQueue, Empty  # imports assumed by this standalone snippet
from can import Listener


class BufferedReader(Listener):
    """
    A BufferedReader is a subclass of :class:`~can.Listener` which implements a
    **message buffer**: that is, when the :class:`can.BufferedReader` instance is
    notified of a new message it pushes it into a queue of messages waiting to
    be serviced. The messages can then be fetched with
    :meth:`~can.BufferedReader.get_message`.

    Putting in messages after :meth:`~can.BufferedReader.stop` has been called will raise
    an exception, see :meth:`~can.BufferedReader.on_message_received`.

    :attr bool is_stopped: ``True`` iff the reader has been stopped
    """

    def __init__(self):
        # set to "infinite" size
        self.buffer = SimpleQueue()
        self.is_stopped = False

    def on_message_received(self, msg):
        """Append a message to the buffer.

        :raises RuntimeError:
            if the reader has already been stopped
        """
        if self.is_stopped:
            raise RuntimeError("reader has already been stopped")
        else:
            self.buffer.put(msg)

    def get_message(self, timeout=0.5):
        """
        Attempts to retrieve the next message from the buffer (FIFO order). If no
        message is available, it blocks for the given timeout or until a message
        is received, whichever comes first, and returns None if the buffer is
        still empty. This method does not block after
        :meth:`can.BufferedReader.stop` has been called.

        :param float timeout: The number of seconds to wait for a new message.
        :rtype: can.Message or None
        :return: the received message, or None if the timeout elapsed without one.
        """
        try:
            return self.buffer.get(block=not self.is_stopped, timeout=timeout)
        except Empty:
            return None

    def stop(self):
        """Prohibits any more additions to this reader.
        """
        self.is_stopped = True
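A minimal usage sketch (not from the original source): the listener can be fed directly instead of being attached to a real CAN bus via can.Notifier, and any object can stand in for a can.Message here.

reader = BufferedReader()
reader.on_message_received("fake message")   # normally a can.Message instance
print(reader.get_message(timeout=0.1))       # -> "fake message"
print(reader.get_message(timeout=0.1))       # -> None once the buffer is empty
reader.stop()
# reader.on_message_received("x")            # would now raise RuntimeError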
Example #2
    def delete_eps_moves(self):
        grammar = self._grammar

        is_eps = {str(k): False for k in grammar.nonterminal}
        checked = {str(k): False for k in grammar.nonterminal}
        concerned_rules = {
            str(k): [
                i for i in range(len(grammar.rules))
                if k in grammar.rules[i].rhs
            ]
            for k in grammar.nonterminal
        }
        q = SimpleQueue()
        counter = {}
        eps_nonterms = []
        for rule in grammar.rules:
            str_rule = str(rule)
            counter[str_rule] = 0
            for sym in rule.rhs:
                if sym in grammar.nonterminal:
                    counter[str_rule] += 1

            if not counter[str_rule] and EPS_SYM in rule.rhs:
                q.put(rule.lhs)
                eps_nonterms.append(rule.lhs)
                is_eps[str(rule.lhs)] = True
                checked[str(rule.lhs)] = True

        while not q.empty():
            nonterm = q.get()
            checked[str(nonterm)] = True
            for rule_idx in concerned_rules[str(nonterm)]:
                rule = grammar.rules[rule_idx]
                counter[str(rule)] -= 1
                if not counter[str(rule)] and not checked[str(rule.lhs)]:
                    is_eps[str(rule.lhs)] = True
                    for x in rule.rhs:
                        if x in grammar.terminal and x != EPS_SYM:
                            is_eps[str(rule.lhs)] = False
                    if is_eps[str(rule.lhs)]:
                        q.put(rule.lhs)

        new_rules = []
        for rule in grammar.rules:
            if EPS_SYM in rule.rhs:
                continue
            rules_for_extend = [GrammarRule(rule.lhs, [])]
            for sym in rule.rhs:
                if sym in grammar.nonterminal:
                    if is_eps[str(sym)]:
                        if is_eps[str(rule.lhs)] and grammar.nonterminal.index(rule.lhs) \
                                < grammar.nonterminal.index(sym) and sym not in eps_nonterms:
                            rules_for_extend = [
                                GrammarRule(x.lhs, x.rhs + [sym])
                                for x in rules_for_extend
                            ]
                        else:
                            rules_for_extend = [
                                GrammarRule(rule.lhs, x.rhs + [y])
                                for x in rules_for_extend
                                for y in [sym, EPS_SYM]
                            ]
                    else:
                        rules_for_extend = [
                            GrammarRule(x.lhs, x.rhs + [sym])
                            for x in rules_for_extend
                        ]
                else:
                    rules_for_extend = [
                        GrammarRule(x.lhs, x.rhs + [sym])
                        for x in rules_for_extend
                    ]

            new_rules.extend(rules_for_extend)
        new_rules = [
            rule for rule in new_rules
            if len(rule.rhs) > 0 and EPS_SYM not in rule.rhs
        ]

        start_rules = [
            rule for rule in grammar.rules if rule.lhs == grammar.start_symbol
        ]
        for rule in start_rules:
            if EPS_SYM in rule.rhs:
                new_rules.append(rule)

        self._grammar.rules = new_rules
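Stripped of the project-specific GrammarRule/EPS_SYM machinery, the first half of this method is a standard worklist computation of the nullable (epsilon-deriving) nonterminals. A simplified, self-contained sketch of the same idea (illustrative only; the grammar representation below is an assumption):

from queue import SimpleQueue

def nullable_nonterminals(rules):
    # rules: list of (lhs, rhs) pairs, rhs a list of symbols; [] means epsilon
    nullable = {lhs for lhs, rhs in rules if rhs == []}
    q = SimpleQueue()
    for n in nullable:
        q.put(n)
    while not q.empty():
        q.get()  # each newly discovered nullable symbol triggers a re-scan
        for lhs, rhs in rules:
            if lhs not in nullable and rhs and all(s in nullable for s in rhs):
                nullable.add(lhs)
                q.put(lhs)
    return nullable

rules = [("S", ["A", "B"]), ("A", []), ("B", ["A"]), ("B", ["b"])]
print(nullable_nonterminals(rules))  # {'A', 'B', 'S'} (set order may vary)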
Example #3
    def delete_left_rec(self):
        grammar = self._grammar

        new_rules = []
        new_nonterm = list(grammar.nonterminal)
        # fresh helper nonterminal name: the next unused character after '@' (i.e. 'A')
        next_char = GrammarSymbol(
            get_next_char('@', grammar.nonterminal + grammar.terminal))

        for nonterm in grammar.nonterminal:
            rules = [y for y in grammar.rules if y.lhs == nonterm]
            cleared_rules = []
            for rule in rules:
                q = SimpleQueue()
                q.put(rule)
                while not q.empty():
                    cur_rule = q.get()
                    if cur_rule.rhs[0] in grammar.terminal or (
                            new_nonterm.index(cur_rule.rhs[0]) >=
                            new_nonterm.index(cur_rule.lhs)):
                        cleared_rules.append(cur_rule)
                    else:
                        rules_to_insert = [
                            y.rhs + cur_rule.rhs[1:] for y in new_rules
                            if y.lhs == cur_rule.rhs[0]
                        ]
                        if not rules_to_insert:
                            rules_to_insert = [
                                y.rhs + cur_rule.rhs[1:] for y in grammar.rules
                                if y.lhs == cur_rule.rhs[0]
                            ]
                        for i in rules_to_insert:
                            q.put(GrammarRule(cur_rule.lhs, i))

            rules = cleared_rules

            rec_rules = [rule for rule in rules if nonterm == rule.rhs[0]]
            if rec_rules:
                com_rules = [rule for rule in rules if rule not in rec_rules]

                new_rules.extend([
                    GrammarRule(nonterm, rule.rhs + [next_char])
                    for rule in com_rules
                ])
                new_rules.extend(
                    [GrammarRule(nonterm, rule.rhs) for rule in com_rules])
                new_rules.extend([
                    GrammarRule(next_char, rule.rhs[1:] + [next_char])
                    for rule in rec_rules
                ])
                new_rules.extend([
                    GrammarRule(next_char, rule.rhs[1:]) for rule in rec_rules
                ])
                new_nonterm.insert(new_nonterm.index(nonterm) + 1, next_char)
                next_char = GrammarSymbol(
                    get_next_char(next_char.mark,
                                  new_nonterm + grammar.terminal))
            else:
                new_rules.extend(rules)

        self._grammar.rules = new_rules
        self._grammar.nonterminal = new_nonterm
Example #4
class AgentInterface:
    def __init__(self, name, quit_flag: Event, ip_addr='', ip_port=0):
        self.name = name
        self.__flaq_quit = quit_flag
        self.__ip_adress = ip_addr
        self.__ip_port = ip_port
        self.__connection = None
        self.__sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.__sock.setblocking(True)
        self.__connected = False
        self.q_data_in = SimpleQueue()
        self.q_sys_in = SimpleQueue()
        self.agent_status = ''
        self.hw_status = ''
        self.enabled = False
        self.logger = logging.getLogger("manager")

    def set_ip_address(self, addr, port):
        self.__ip_adress = addr
        self.__ip_port = port

    def connect(self):
        if not self.__ip_port or not self.__ip_adress:
            self.logger.error(
                f"Primero se debe setear la direccion y puerto IP del agente {self.name}"
            )
            return
        Thread(target=self.__connect_insist,
               name=f"AgentInterface({self.name}).__connect_insist",
               daemon=True).start()
        Thread(target=self.__receive,
               name=f"AgentInterface({self.name}).__receive",
               daemon=True).start()
        Thread(target=self.__check_state,
               name=f"AgentInterface({self.name}).__check_state",
               daemon=True).start()

    def __check_state(self):
        while not self.__flaq_quit.is_set():
            if self.__connected:
                self.send_msg(Message.cmd_query_agent_state())
                if not self.__flaq_quit.wait(1):
                    self.send_msg(Message.cmd_query_hw_state())
                    self.__flaq_quit.wait(1)

    def __connect_insist(self):
        error = False
        self.__connected = False
        while not self.__connected and not self.__flaq_quit.is_set():
            try:
                self.__sock.connect((self.__ip_adress, self.__ip_port))
                self.__connected = True
            except ConnectionRefusedError:
                if not error:
                    error = True
                    self.logger.warning(
                        f"Conexión rechazada a agente {self.name}. {self.__ip_adress}:{self.__ip_port}. Reintentando."
                    )
                self.__flaq_quit.wait(1)
            except Exception:
                if not error:
                    error = True
                self.logger.exception(
                    f"Fallo de conexión a {self.__ip_adress}:{self.__ip_port}. Reintentando"
                )
                self.__flaq_quit.wait(1)

    def __receive(self):
        cmd = b''
        while not self.__flaq_quit.is_set():
            if self.__connected:
                try:
                    bt = self.__sock.recv(1)
                    if not bt:
                        self.__flaq_quit.wait(0.1)
                        raise ConnectionResetError
                    elif bt == Message.EOT:
                        msg = Message.deserialize(cmd)
                        if msg.typ == Message.AGENT_STATE:
                            self.agent_status = msg.arg
                        elif msg.typ == Message.HW_STATE:
                            self.hw_status = msg.arg
                        elif msg.typ == Message.SYS_STATE:
                            self.q_sys_in.put(msg.arg)
                        elif msg.typ == Message.DATA:
                            self.q_data_in.put(msg.arg)
                        cmd = b''
                    else:
                        cmd += bt
                except TimeoutError:
                    pass
                except ConnectionResetError:
                    self.__connect_insist()

    def disconnect(self):
        self.__sock.close()
        self.__connected = False

    def is_connected(self):
        return self.__connected

    def get_data(self, block=False):
        if block:
            return self.q_data_in.get()
        else:
            if not self.q_data_in.empty():
                return self.q_data_in.get()
            else:
                return None

    def get_sys_state(self, block=False):
        if block:
            return self.q_sys_in.get()
        else:
            if not self.q_sys_in.empty():
                return self.q_sys_in.get()
            else:
                return None

    def send_data(self, data):
        self.send_msg(Message.data_msg(data))

    def send_msg(self, msg: Message):
        if self.__connected:
            try:
                self.__sock.sendall(msg.serialize())
                return True
            except BrokenPipeError:
                self.logger.error(
                    f"No se pudo enviar el mensaje al puerto {self.__ip_port}: BrokenPipe."
                )
            except ConnectionResetError:
                self.logger.error(f"Agente desconectado")
        else:
            return False

    def quit(self):
        self.__flaq_quit.set()
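A hedged usage sketch: the Message class and the remote agent are project-specific, and the address below is made up; without a listening agent the connect thread simply keeps retrying until quit() is called.

from threading import Event

quit_flag = Event()
agent = AgentInterface("sensor-01", quit_flag)
agent.set_ip_address("192.168.0.10", 5000)
agent.connect()              # spawns the connect/receive/monitor threads
data = agent.get_data()      # non-blocking by default: None if the queue is empty
if data is not None:
    print("got:", data)
agent.quit()                 # sets the quit flag so the worker threads wind down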
Example #5
class VM():
    def __init__(self, d, io=None):
        self.d = d.copy()
        self.d += [0] * 10000
        self.i = 0
        self.base = 0
        self.io = SimpleQueue()
        if io is not None:
            self.io.put(io)

    def run(self):
        i = self.i
        io = self.io
        d = self.d

        def get_val(imm, i):
            return d[get_addr(imm, i)]

        def get_addr(imm, i):
            if imm == 1:
                return i
            elif imm == 2:
                return self.base + d[i]
            else:
                return d[i]

        output = []
        while True:
            instr = d[i] % 100
            imm_a = d[i] // 100 % 10
            imm_b = d[i] // 1000 % 10
            imm_c = d[i] // 10000
            #print(f"[{i}][{self.base}][{[imm_a, imm_b, imm_c]}] instr = {instr}")
            if instr == 1:
                d[get_addr(imm_c, i +
                           3)] = get_val(imm_a, i + 1) + get_val(imm_b, i + 2)
                i += 4
            elif instr == 2:
                d[get_addr(imm_c, i +
                           3)] = get_val(imm_a, i + 1) * get_val(imm_b, i + 2)
                i += 4
            elif instr == 3:
                d[get_addr(imm_a, i + 1)] = io.get()
                i += 2
            elif instr == 4:
                output.append(get_val(imm_a, i + 1))
                i += 2
            elif instr == 5:
                if get_val(imm_a, i + 1) != 0:
                    i = get_val(imm_b, i + 2)
                else:
                    i += 3
            elif instr == 6:
                if get_val(imm_a, i + 1) == 0:
                    i = get_val(imm_b, i + 2)
                else:
                    i += 3
            elif instr == 7:
                d[get_addr(imm_c, i + 3)] = int(
                    get_val(imm_a, i + 1) < get_val(imm_b, i + 2))
                i += 4
            elif instr == 8:
                d[get_addr(imm_c, i + 3)] = int(
                    get_val(imm_a, i + 1) == get_val(imm_b, i + 2))
                i += 4
            elif instr == 9:
                self.base += get_val(imm_a, i + 1)
                i += 2
            else:
                assert d[i] == 99
                break
        # persist the instruction pointer so the VM state reflects the halt
        self.i = i
        return output
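As a quick check (not part of the original example), the interpreter can be driven with tiny hand-written Intcode programs: opcode 104 outputs an immediate value, opcode 3 reads from the io queue, and 99 halts.

print(VM([104, 42, 99]).run())           # -> [42]
print(VM([3, 0, 4, 0, 99], io=7).run())  # echoes the queued input: [7]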
Example #6
from queue import SimpleQueue, Empty
q = SimpleQueue()
items = ["a", 5, True]
for item in items:
    q.put(item)
# while not q.empty():
try:
    while True:
        print(q.get(timeout=1))
except Empty:
    print("Queue is empty")
Example #7
class CallbackThread(object):
    def __init__(self, name):
        # Don't need this in python, queue already has one
        # self.m_mutex = threading.Lock()

        self.m_listeners = UidVector()
        self.m_queue = Queue()
        self.m_pollers = UidVector()

        self.m_active = False
        self.name = name

    #
    # derived must implement the following
    #

    def matches(self, listener, data):
        raise NotImplementedError

    def setListener(self, data, listener_uid):
        raise NotImplementedError

    def doCallback(self, callback, data):
        raise NotImplementedError

    #
    # Impl
    #

    def start(self):
        self.m_active = True
        self._thread = SafeThread(target=self.main, name=self.name)

    def stop(self):
        self.m_active = False
        self.m_queue.put(None)

    def sendPoller(self, poller_uid, *args):
        # args are (listener_uid, item)
        poller = self.m_pollers.get(poller_uid)
        if poller:
            with poller.poll_cond:
                poller.poll_queue.append(args)
                poller.poll_cond.notify()

    def main(self):
        # micro-optimization: lift these out of the loop
        doCallback = self.doCallback
        matches = self.matches
        queue_get = self.m_queue.get
        setListener = self.setListener
        listeners_get = self.m_listeners.get
        listeners_items = self.m_listeners.items

        while True:
            item = queue_get()
            if not item:
                logger.debug("%s thread no longer active", self.name)
                break

            listener_uid, item = item
            if listener_uid is not None:
                listener = listeners_get(listener_uid)
                if listener and matches(listener, item):
                    setListener(item, listener_uid)
                    cb = listener.callback
                    if cb:
                        try:
                            doCallback(cb, item)
                        except Exception:
                            logger.warning(
                                "Unhandled exception processing %s callback",
                                self.name,
                                exc_info=True,
                            )
                    elif listener.poller_uid is not None:
                        self.sendPoller(listener.poller_uid, listener_uid, item)
            else:
                # Use copy because iterator might get invalidated
                for listener_uid, listener in list(listeners_items()):
                    if matches(listener, item):
                        setListener(item, listener_uid)
                        cb = listener.callback
                        if cb:
                            try:
                                doCallback(cb, item)
                            except Exception:
                                logger.warning(
                                    "Unhandled exception processing %s callback",
                                    self.name,
                                    exc_info=True,
                                )
                        elif listener.poller_uid is not None:
                            self.sendPoller(listener.poller_uid, listener_uid, item)

        # Wake any blocked pollers
        for poller in self.m_pollers.values():
            poller.terminate()
Example #8
    def makeArt(self, save=True):
        if save:
            if os.path.exists("res_img"):
                rmtree("res_img")
            os.mkdir("res_img")
        i = 0
        to_show = np.zeros(self.img.shape, dtype=np.uint8)
        q = SimpleQueue()
        q.put(self.quadtree.root)
        q.put(None)
        while not q.empty():
            node = q.get()
            if node is None:
                plt.imshow(to_show)
                plt.pause(1)
                if save:
                    imsave("res_img/img" + str(i) + ".png", to_show)
                    i += 1
                if q.empty():
                    return
                else:
                    q.put(None)
                    continue

            rect = node.rect
            img_rect = self.img[rect.l:rect.l + rect.nrows,
                                rect.c:rect.c + rect.ncols]
            to_show[rect.l:rect.l + rect.nrows,
                    rect.c:rect.c + rect.ncols] = np.mean(
                        img_rect.reshape(-1, 3), axis=0)

            if node.NW:
                q.put(node.NW)
            if node.NE:
                q.put(node.NE)
            if node.SW:
                q.put(node.SW)
            if node.SE:
                q.put(node.SE)

        plt.show()
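The None entry enqueued alongside the root acts as a level sentinel: each time it is dequeued, one full depth of the quadtree has been painted, so a frame is shown and the sentinel is re-enqueued. The same pattern works for any tree; here is a generic, self-contained sketch (not the original quadtree classes):

from queue import SimpleQueue

def walk_levels(root, children):
    # Yield the nodes of a tree one level at a time, using a None sentinel
    # to mark the end of each level, as makeArt() does above.
    q = SimpleQueue()
    q.put(root)
    q.put(None)
    level = []
    while not q.empty():
        node = q.get()
        if node is None:
            yield level
            level = []
            if q.empty():
                return
            q.put(None)          # re-arm the sentinel for the next level
            continue
        level.append(node)
        for child in children(node):
            q.put(child)

tree = {"a": ["b", "c"], "b": ["d"], "c": [], "d": []}
print(list(walk_levels("a", lambda n: tree[n])))  # [['a'], ['b', 'c'], ['d']]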
Example #9
class CamModel:
    def __init__(self,
                 msg_pipe: Connection,
                 img_pipe: Connection,
                 cam_index: int = 0):
        set_event_loop(new_event_loop())
        self._msg_pipe = msg_pipe
        self._img_pipe = img_pipe
        self._cam_index = cam_index
        self._cam_reader = StreamReader(cam_index)
        self._size_gtr = SizeGetter(self._cam_reader)
        self._stop_event = Event()
        self._write_q = SimpleQueue()
        self._cam_writer = StreamWriter()
        self._tasks = []
        self._switcher = {
            defs.ModelEnum.STOP: self._stop_writing,
            defs.ModelEnum.START: self._start_writing,
            defs.ModelEnum.SET_USE_CAM: self._use_cam,
            defs.ModelEnum.SET_USE_FEED: self._use_feed,
            defs.ModelEnum.CLEANUP: self.cleanup,
            defs.ModelEnum.INITIALIZE: self.init_cam,
            defs.ModelEnum.GET_FPS: self._get_fps,
            defs.ModelEnum.SET_FPS: self._set_fps,
            defs.ModelEnum.GET_RES: self._get_res,
            defs.ModelEnum.SET_RES: self._set_res,
            defs.ModelEnum.COND_NAME: self._update_cond_name,
            defs.ModelEnum.BLOCK_NUM: self._update_block_num,
            defs.ModelEnum.KEYFLAG: self._update_keyflag,
            defs.ModelEnum.EXP_STATUS: self._update_exp_status,
            defs.ModelEnum.LANGUAGE: self.set_lang,
            defs.ModelEnum.OVERLAY: self._toggle_overlay,
        }
        self._running = True
        self._process_imgs = False
        self._writing = False
        self._show_feed = False
        self._frame_size = self._cam_reader.get_resolution()
        self._handle_frames = Event()
        self._loop = get_event_loop()

        self._strings = strings[LangEnum.ENG]
        self._fps = 30
        self._cam_name = "CAM_" + str(self._cam_index)
        self._cond_name = str()
        self._exp_status = self._strings[StringsEnum.EXP_STATUS_STOP]
        self._exp_running = False
        self._block_num = 0
        self._keyflag = str()
        self.set_lang()

        self._test_task = None
        self._num_img_workers = 2
        self._sems1 = list()
        self._sems2 = list()
        self._sems3 = list()
        self._shm_ovl_arrs = list()
        self._shm_img_arrs = list()
        self._np_img_arrs = list()
        self._num_writes_arrs = list()
        self._use_overlay = True
        self._proc_thread = Thread(target=None, args=())
        cur_res = self._cam_reader.get_resolution()
        self._cur_arr_shape = (int(cur_res[1]), int(cur_res[0]), 3)
        self._cur_arr_size = (self._cur_arr_shape[0] * self._cur_arr_shape[1]
                              * self._cur_arr_shape[2])
        self._executor = ThreadPoolExecutor()
        self._loop.run_until_complete(self._start_loop())

    async def _handle_pipe(self) -> None:
        """
        Handle msgs from model.
        :return None:
        """
        try:
            while self._running:
                if self._msg_pipe.poll():
                    msg = self._msg_pipe.recv()
                    if msg[0] in self._switcher.keys():
                        if msg[1] is not None:
                            self._switcher[msg[0]](msg[1])
                        else:
                            self._switcher[msg[0]]()
                await asyncsleep(.02)
        except (BrokenPipeError, OSError):
            # the controller's end of the pipe is gone; stop listening quietly
            pass

    def cleanup(self, discard: bool) -> None:
        """
        Cleanup this code and prep for app closure.
        :param discard: Quit without saving.
        :return None:
        """
        create_task(self._cleanup(discard))

    def init_cam(self) -> None:
        """
        Begin initializing camera.
        :return None:
        """
        self._test_task = create_task(self._run_tests())

    def set_lang(self, lang: LangEnum = LangEnum.ENG) -> None:
        """
        Set this camera's language.
        :param lang: The new language enum.
        :return None:
        """
        self._strings = strings[lang]
        self._set_texts()

    async def _run_tests(self) -> None:
        """
        Run each camera test in order.
        :return None:
        """
        prog_tracker = create_task(self._monitor_init_progress())
        sizes = await self._size_gtr.get_sizes()
        if len(sizes) < 1:
            self._msg_pipe.send((defs.ModelEnum.FAILURE, None))
            prog_tracker.cancel()
            return
        self._msg_pipe.send((defs.ModelEnum.START, (self._fps, sizes)))
        prog_tracker.cancel()
        self._proc_thread = Thread(target=self._start_frame_processing,
                                   args=())
        self._proc_thread.start()

    async def _monitor_init_progress(self) -> None:
        """
        Periodically update controller on init progress.
        :return None:
        """
        while True:
            if self._size_gtr.status >= 100:
                break
            self._msg_pipe.send(
                (defs.ModelEnum.STAT_UPD, self._size_gtr.status))
            await asyncsleep(.5)

    async def _await_reader_err(self) -> None:
        """
        Handle if reader fails.
        :return None:
        """
        while self._running:
            await self._cam_reader.await_err()
            self._msg_pipe.send((defs.ModelEnum.FAILURE, None))

    async def _cleanup(self, discard: bool) -> None:
        self._running = False
        if self._test_task is not None:
            if self._test_task.done():
                await self._test_task
            else:
                self._test_task.cancel()
        self._size_gtr.stop()
        self._stop()
        self._cam_reader.cleanup()
        self._cam_writer.cleanup(discard)
        self._msg_pipe.send((defs.ModelEnum.CLEANUP, None))

    def _refresh_np_arrs(self) -> None:
        self._np_img_arrs = list()
        for j in range(self._num_img_workers):
            self._np_img_arrs.append(
                frombuffer(self._shm_img_arrs[j].get_obj(),
                           count=self._cur_arr_size,
                           dtype=DTYPE).reshape(self._cur_arr_shape))

    def _update_block_num(self, num: int) -> None:
        """
        Update the block num shown on camera details.
        :param num: The new num to show.
        :return None:
        """
        self._block_num = num

    def _update_cond_name(self, name: str) -> None:
        """
        Update the condition name shown on camera details.
        :param name: The new name to show.
        :return None:
        """
        self._cond_name = name

    def _update_keyflag(self, flag: str) -> None:
        """
        Update the key flag shown on camera details.
        :param flag: The new key flag to show.
        :return None:
        """
        self._keyflag = flag

    def _update_exp_status(self, status: bool) -> None:
        """
        Update the experiment status shown on the camera details.
        :param status: The new status to show.
        :return None:
        """
        self._exp_running = status
        self._set_texts()

    def _toggle_overlay(self, is_active: bool) -> None:
        """
        toggle whether to use overlay on this camera.
        :param is_active: Whether to use overlay.
        :return None:
        """
        self._use_overlay = is_active

    def _get_res(self) -> None:
        """
        Send the current resolution of this camera.
        :return None:
        """
        self._msg_pipe.send(
            (defs.ModelEnum.CUR_RES, self._cam_reader.get_resolution()))

    def _set_res(self, new_res: (float, float)) -> None:
        """
        Change the resolution on this camera.
        :param new_res: The new resolution to use.
        :return None:
        """
        if new_res == self._cam_reader.get_resolution():
            return
        self._show_feed = False
        self._cam_reader.stop_reading()
        self._stop_frame_processing()
        self._cam_reader.set_resolution(new_res)
        self._times = deque()
        cur_res = self._cam_reader.get_resolution()
        self._cur_arr_shape = (int(cur_res[1]), int(cur_res[0]), 3)
        self._cur_arr_size = (self._cur_arr_shape[0] * self._cur_arr_shape[1]
                              * self._cur_arr_shape[2])
        self._proc_thread = Thread(target=self._start_frame_processing,
                                   args=())
        self._cam_reader.start_reading()
        self._proc_thread.start()
        self._show_feed = True

    def _use_cam(self, is_active: bool) -> None:
        """
        Toggle whether this cam is being used.
        :param is_active: Whether this cam is being used.
        :return None:
        """
        if is_active:
            self._cam_reader.start_reading()
            self._handle_frames.set()
        else:
            self._cam_reader.stop_reading()
            self._handle_frames.clear()

    def _use_feed(self, is_active: bool) -> None:
        """
        Toggle whether this cam feed is being passed to the view.
        :param is_active: Whether this cam feed is being passed to the view.
        :return None:
        """
        self._show_feed = is_active

    def _start_writing(self, path: str) -> None:
        """
        Create new writer and set boolean to start putting frames in write queue.
        :return None:
        """
        # filename = path + "CAM_" + str(self._cam_index) + "_" + format_current_time(datetime.now(), save=True) + ".avi"
        filename = path + "CAM_" + str(self._cam_index) + ".avi"
        x, y = self._cam_reader.get_resolution()
        self._frame_size = (int(x), int(y))
        self._write_q = SimpleQueue()
        self._cam_writer = StreamWriter()
        self._cam_writer.start(filename, int(self._fps), self._frame_size,
                               self._write_q)
        self._writing = True

    def _stop_writing(self) -> None:
        """
        Destroy writer and set boolean to stop putting frames in write queue.
        :return None:
        """
        self._writing = False
        while not self._write_q.empty():
            tsleep(.05)
        self._cam_writer.cleanup()
        self._msg_pipe.send((defs.ModelEnum.STOP, None))

    async def _start_loop(self) -> None:
        """
        Run all async tasks in this model and wait for stop signal. (This method is the main loop for this process)
        :return None:
        """
        self._tasks.append(create_task(self._handle_pipe()))
        self._tasks.append(create_task(self._await_reader_err()))
        await self._stop_event.wait()

    def _start_frame_processing(self) -> None:
        """
        Create image processing threads and wait for stop signal.
        :return None:
        """
        self._process_imgs = True
        max_res = defs.common_resolutions[-1]
        max_img_arr_shape = (int(max_res[1]), int(max_res[0]), 3)
        max_img_arr_size = (max_img_arr_shape[0] * max_img_arr_shape[1]
                            * max_img_arr_shape[2])
        self._sems1 = list()
        self._sems2 = list()
        self._sems3 = list()
        self._shm_ovl_arrs = list()
        self._shm_img_arrs = list()
        self._np_img_arrs = list()
        self._num_writes_arrs = list()
        for i in range(self._num_img_workers):
            self._sems1.append(Semaphore(0))
            self._sems2.append(Semaphore(0))
            self._sems3.append(Semaphore(1))
            self._shm_ovl_arrs.append(Array(c_char, BYTESTR_SIZE))
            self._shm_img_arrs.append(Array('Q', max_img_arr_size))
            self._num_writes_arrs.append(Value('i', 1))
            worker_args = (self._shm_img_arrs[i], self._sems1[i],
                           self._sems2[i], self._shm_ovl_arrs[i])
            worker = Thread(target=self._img_processor,
                            args=worker_args,
                            daemon=True)
            worker.start()
        self._refresh_np_arrs()
        distributor = Thread(target=self._distribute_frames,
                             args=(),
                             daemon=True)
        distributor.start()
        handler = Thread(target=self._handle_processed_frames,
                         args=(),
                         daemon=True)
        handler.start()
        while self._process_imgs:
            tsleep(1)

    def _stop_frame_processing(self) -> None:
        """
        Stop proc_thread and join it.
        :return None:
        """
        self._process_imgs = False
        self._proc_thread.join()

    def _stop(self) -> None:
        """
        Stop all async tasks.
        :return None:
        """
        for task in self._tasks:
            task.cancel()
        self._process_imgs = False
        if self._proc_thread.is_alive():
            self._proc_thread.join()
        self._stop_event.set()

    def _get_fps(self) -> None:
        """
        Send the current fps of this camera.
        :return None:
        """
        self._msg_pipe.send(
            (defs.ModelEnum.CUR_FPS, self._cam_reader.get_fps_setting()))

    def _set_fps(self, new_fps: float) -> None:
        """
        Set new fps and reset fps tracking.
        :param new_fps: The new fps to use.
        :return None:
        """
        self._times = deque()
        self._cam_reader.set_fps(new_fps)
        self._fps = int(new_fps)

    def _distribute_frames(self) -> None:
        """
        Distribute frames in proper order to image_worker processes.
        :return None:
        """
        i = 0
        while self._process_imgs:
            ret, val = self._cam_reader.get_next_new_frame()
            if ret:
                (frame, timestamp, num_writes) = val
                self._hand_out_frame(frame, timestamp, i, num_writes)
                i = self._increment_counter(i)
            else:
                tsleep(.001)

    def _hand_out_frame(self, frame, timestamp: datetime, i: int,
                        num_writes: int) -> None:
        """
        Helper function for self._distribute_frames()
        :param frame: The frame to put an overlay on.
        :param timestamp: A datetime object to add to the overlay.
        :param i: Which arrays to access.
        :param num_writes: The number of times to write this frame to save file.
        :return None:
        """
        overlay = shorten(self._cond_name, COND_NAME_WIDTH) + CM_SEP + \
                  format_current_time(timestamp, time=True, mil=True) + CM_SEP + self._exp_status + CM_SEP + \
                  str(self._block_num) + CM_SEP + str(self._keyflag) + CM_SEP + str(self._cam_reader.get_fps_actual())\
                  + "/" + str(self._fps)
        self._sems3[i].acquire()
        copyto(self._np_img_arrs[i], frame)
        self._shm_ovl_arrs[i].value = (overlay.encode())
        self._num_writes_arrs[i].value = num_writes
        self._sems1[i].release()

    def _increment_counter(self, num: int) -> int:
        """
        Helper function for self._distribute_frames()
        :param num: The integer to increment from.
        :return int: The incremented integer.
        """
        return (num + 1) % self._num_img_workers

    def _img_processor(self, sh_img_arr: Array, sem1: Semaphore,
                       sem2: Semaphore, ovl_arr: Array) -> None:
        """
        Process images as needed.
        :param sh_img_arr: The array containing the frame to work with.
        :param sem1: The entrance lock.
        :param sem2: The exit lock.
        :param ovl_arr: The array containing the overlay work with.
        :return None:
        """
        img_dim = (EDIT_HEIGHT, self._cur_arr_shape[1], self._cur_arr_shape[2])
        img_size = int(EDIT_HEIGHT * img_dim[1] * img_dim[2])
        img_arr = frombuffer(sh_img_arr.get_obj(), count=img_size,
                             dtype=DTYPE).reshape(img_dim)
        while self._process_imgs:
            sem1.acquire()
            if self._use_overlay:
                img_pil = Image.fromarray(img_arr)
                draw = ImageDraw.Draw(img_pil)
                draw.text(OVL_POS,
                          text=ovl_arr.value.decode(),
                          font=OVL_FONT,
                          fill=OVL_CLR)
                processed_img = asarray(img_pil)
                copyto(img_arr, processed_img)
            sem2.release()

    def _handle_processed_frames(self) -> None:
        """
        Handle processed frames in proper order from ImgWorker processes.
        :return None:
        """
        i = 0
        while self._process_imgs:
            self._sems2[i].acquire()
            frame = self._np_img_arrs[i]
            if self._writing:
                for p in range(self._num_writes_arrs[i].value):
                    self._write_q.put(copy(frame))
            if self._show_feed:
                to_send = self.image_resize(frame, width=640)
                self._img_pipe.send(to_send)
            self._sems3[i].release()
            i = self._increment_counter(i)

    def _set_texts(self) -> None:
        """
        Set the initial texts for this camera.
        :return None:
        """
        if self._exp_running:
            self._exp_status = self._strings[StringsEnum.EXP_STATUS_RUN]
        else:
            self._exp_status = self._strings[StringsEnum.EXP_STATUS_STOP]

    # from https://stackoverflow.com/questions/44650888/resize-an-image-without-distortion-opencv
    @staticmethod
    def image_resize(image, width=None, height=None, inter=INTER_AREA):
        # initialize the dimensions of the image to be resized and
        # grab the image size
        dim = None
        (h, w) = image.shape[:2]

        # if both the width and height are None, then return the
        # original image
        if width is None and height is None:
            return image

        # check to see if the width is None
        if width is None:
            # calculate the ratio of the height and construct the
            # dimensions
            r = height / float(h)
            dim = (int(w * r), height)

        # otherwise, the height is None
        else:
            # calculate the ratio of the width and construct the
            # dimensions
            r = width / float(w)
            dim = (width, int(h * r))

        # resize the image
        resized = resize(image, dim, interpolation=inter)

        # return the resized image
        return resized
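The _handle_pipe()/_switcher pair above is a command-dispatch loop over a multiprocessing pipe: the child polls its Connection, receives (command, payload) tuples, and routes them through a handler table. A stripped-down, self-contained sketch of that pattern (the command names are illustrative, not from the source):

from multiprocessing import Pipe, Process

def child(conn):
    # Poll the pipe and dispatch (command, payload) messages via a table.
    handlers = {"echo": lambda arg: conn.send(("echo", arg))}
    running = True
    while running:
        if conn.poll(0.1):            # wait up to 100 ms for a message
            cmd, arg = conn.recv()
            if cmd == "stop":
                running = False
            elif cmd in handlers:
                handlers[cmd](arg)

if __name__ == "__main__":
    parent_end, child_end = Pipe()
    worker = Process(target=child, args=(child_end,))
    worker.start()
    parent_end.send(("echo", "hello"))
    print(parent_end.recv())          # -> ('echo', 'hello')
    parent_end.send(("stop", None))
    worker.join()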
Example #10
class GcRecorder(object):

    def __init__(self, rpc):
        super(GcRecorder, self).__init__()
        self.should_stop = False
        L.debug('session:{} initializing gc recorder'.format(rpc.session_id))
        self.record_rpc = rpc
        self.gc_recorder = dict()
        self.gc_queue = SimpleQueue()
        if "EGGROLL_GC_DISABLE" in os.environ and os.environ["EGGROLL_GC_DISABLE"] == '1':
            L.info("global gc is disable, "
                  "will not execute gc but only record temporary RollPair during the whole session")
        else:
            self.gc_thread = Thread(target=self.run, daemon=True)
            self.gc_thread.start()
            L.debug("starting gc_thread......")

    def stop(self):
        self.should_stop = True

    def run(self):
        if "EGGROLL_GC_DISABLE" in os.environ and os.environ["EGGROLL_GC_DISABLE"] == '1':
            L.info("global gc switch is close, "
                  "will not execute gc but only record temporary RollPair during the whole session")
            return
        while not self.should_stop:
            try:
                rp_name = self.gc_queue.get(block=True, timeout=0.5)
            except queue.Empty:
                continue
            if not rp_name:
                continue
            L.info(f"GC thread destroying rp:{rp_name}")
            self.record_rpc.load(namespace=self.record_rpc.get_session().get_session_id(),
                                 name=rp_name).destroy()

    def record(self, er_store: ErStore):
        store_type = er_store._store_locator._store_type
        name = er_store._store_locator._name
        namespace = er_store._store_locator._namespace
        if store_type != StoreTypes.ROLLPAIR_IN_MEMORY:
            return
        else:
            L.info("GC recording in memory table namespace={}, name={}, type={}"
                  .format(namespace, name, store_type))
            count = self.gc_recorder.get(name)
            if count is None:
                count = 0
            self.gc_recorder[name] = count + 1
            L.info(f"GC recorded count={len(self.gc_recorder)}")

    def decrease_ref_count(self, er_store):
        if er_store._store_locator._store_type != StoreTypes.ROLLPAIR_IN_MEMORY:
            return
        ref_count = self.gc_recorder.get(er_store._store_locator._name)
        record_count = 0 if ref_count is None or ref_count == 0 else (ref_count - 1)
        self.gc_recorder[er_store._store_locator._name] = record_count
        if record_count == 0 and er_store._store_locator._name in self.gc_recorder:
            L.info(f'GC put in queue:{er_store._store_locator._name}')
            self.gc_queue.put(er_store._store_locator._name)
            self.gc_recorder.pop(er_store._store_locator._name)
Example #11
class HttpSyncedDictionary:
    """
    Contains a dictionary that can be updated and queried locally.
    The updates are sent to a HTTP server, as reply, the current dictionary
    known to the HTTP server is expected.  This is used to update the
    local dictionary.

    In effect, the dictionary can be synchronized across multiple instances
    all using the same server.

    The synchronization happens in a separate thread and is limited by the
    time needed for HTTP POST send/receive.
    """
    def __init__(self, server_url, keys_to_filter=[]):
        """
        :param keys_to_filter: Iterable of keys in the synchronized dictionary.
            In order to not overwrite the values which are produced locally and
            thus more accurate locally than on the server, provide the keys to
            those values here.  Values from the remote server for those keys will
            be ignored.
        """
        self.data = {}
        self.inbox = SimpleQueue()

        self.server_url = server_url
        self.keys_to_filter = keys_to_filter

        self.thread = Thread(target=self._thread_function, daemon=True)
        self.is_thread_running = False

    def _thread_function(self):
        while self.is_thread_running:
            new_data = {}
            while not self.inbox.empty():  # only use latest queue element
                new_data = self.inbox.get_nowait()
            response = requests.post(self.server_url, json=new_data)
            if response.ok:
                remote_status = response.json()
                for key in self.keys_to_filter:
                    remote_status.pop(key, None)
                self.data.update(remote_status)

    def start(self):
        if not self.is_thread_running:
            self.is_thread_running = True
            self.thread.start()

    def stop(self):
        self.is_thread_running = False
        self.thread.join()

    def update(self, dictionary):
        self.data.update(dictionary)
        self.inbox.put(dictionary)

    def get(self, key=None, default_value=None):
        if key is not None:
            return self.data.get(key, default_value)
        else:
            return self.data
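A hedged usage sketch, assuming an HTTP endpoint (the URL below is made up) that accepts a JSON POST and replies with the merged dictionary:

shared = HttpSyncedDictionary("http://localhost:8000/status",
                              keys_to_filter=["local_sensor"])
shared.start()
shared.update({"local_sensor": 42, "mode": "idle"})  # queued and POSTed
print(shared.get("mode"))        # local view, merged with the server's replies
shared.stop()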
Example #12
class Recorder():
    def __init__(self):
        # default recordings directory
        self.rec_dir = path.join(getcwd(), "recordings")
        if not path.exists(self.rec_dir):
            mkdir(self.rec_dir)

        # starting sequence numbers
        self.seq_num = 0
        self.format = 'avi'
        self.fourcc = cv2.VideoWriter_fourcc(*'FFV1')

        self.buffer = SimpleQueue()
        self.saveThread = None
        self.isRecording = False

    def startRecording(self,
                       filename,
                       fps=30,
                       framesize=(640, 480),
                       isColor=True,
                       starting_seq_num=0,
                       overwrite=False) -> bool:
        if self.saveThread is not None:
            return False

        # sanitize input parameters
        filename = filename.split(".")[0]
        if len(filename) == 0:
            raise TypeError("Please supply a valid filename.")
        max_seq_num = 99999
        if starting_seq_num < 0 or starting_seq_num > max_seq_num:
            raise TypeError(
                "Be reasonable! Please chose a sequence number between 0-{}.".
                format(max_seq_num))

        # find a suitable filename
        self.seq_num = starting_seq_num if starting_seq_num > self.seq_num else self.seq_num
        full_filename = f'{self.seq_num:05d}_{filename}.{self.format}'
        if not overwrite:
            for seq_num in range(self.seq_num, max_seq_num + 1):
                full_filename = f'{self.seq_num:05d}_{filename}.{self.format}'
                if path.exists(path.join(self.rec_dir, full_filename)):
                    self.seq_num = seq_num + 1  # prepare for the next file
                else:
                    break

        writer = cv2.VideoWriter(path.join(self.rec_dir, full_filename),
                                 self.fourcc, fps, framesize, isColor)
        self.saveThread = Thread(target=saveVideoFromQueue,
                                 args=[writer, self.buffer, full_filename])
        self.saveThread.start()

        self.isRecording = True
        return True

    def record(self, image):
        self.buffer.put(image)

    def stopRecording(self):
        if not self.isRecording:
            return

        self.buffer.put(None)
        self.saveThread.join()
        self.saveThread = None
        self.isRecording = False
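saveVideoFromQueue is referenced but not included in this excerpt. Judging from how the buffer and the None sentinel are used above, it plausibly looks something like this hypothetical sketch:

def saveVideoFromQueue(writer, buffer, filename):
    # Hypothetical consumer: write frames until the None sentinel pushed by
    # stopRecording(), then release the cv2.VideoWriter.
    while True:
        frame = buffer.get()
        if frame is None:
            break
        writer.write(frame)
    writer.release()
    print(f"Finished writing {filename}")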
Example #13
def job(n: int, results: SimpleQueue) -> None:  # <6>
    results.put((n, check(n)))  # <7>
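The # <6>/# <7> callouts suggest this job() is an excerpt from a larger multiprocessing example, and check() is not shown. A self-contained sketch that drives job() with worker processes, using a stand-in check() (the primality test and the use of multiprocessing.SimpleQueue are assumptions):

from multiprocessing import Process, SimpleQueue

def check(n: int) -> bool:
    # Stand-in for the check() used by job(): a naive primality test.
    return n > 1 and all(n % d for d in range(2, int(n ** 0.5) + 1))

def job(n: int, results: SimpleQueue) -> None:  # as in the example above
    results.put((n, check(n)))

if __name__ == "__main__":
    results = SimpleQueue()
    workers = [Process(target=job, args=(n, results)) for n in (7, 8, 9)]
    for w in workers:
        w.start()
    for _ in workers:
        print(results.get())   # e.g. (7, True), (8, False), (9, False)
    for w in workers:
        w.join()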
Example #14
class Computer(Thread):
    def __init__(self, computer_init: list):
        super().__init__()

        self.program = GrowingList(computer_init.copy())
        self.inputs = SimpleQueue()
        self.outputs = SimpleQueue()

    def run(self):
        index = 0
        relative_offset = 0

        while True:
            opcode, first_mode, second_mode, third_mode = Computer.parse_operation(
                "{0:05d}".format(self.program[index])
            )

            if opcode not in [1, 2, 3, 4, 5, 6, 7, 8, 9, 99]:
                raise ValueError("Intcode computer failed. Diagnostic incomplete...")

            if opcode == 99:
                break

            # Get indexes depending on the mode
            first_idx = (
                relative_offset + self.program[index + 1]
                if first_mode == 2
                else self.program[index + 1]
            )
            second_idx = (
                relative_offset + self.program[index + 2]
                if second_mode == 2
                else self.program[index + 2]
            )
            third_idx = (
                relative_offset + self.program[index + 3]
                if third_mode == 2
                else self.program[index + 3]
            )

            # Get parameter values depending on the mode
            first_param = first_idx if first_mode == 1 else self.program[first_idx]

            if opcode in [1, 2, 5, 6, 7, 8]:
                second_param = (
                    second_idx if second_mode == 1 else self.program[second_idx]
                )

            # Execute operation
            if opcode == 1:
                self.program[third_idx] = first_param + second_param
            elif opcode == 2:
                self.program[third_idx] = first_param * second_param
            elif opcode == 3:
                self.program[first_idx] = self.inputs.get()
            elif opcode == 4:
                self.outputs.put(first_param)
            elif opcode == 5:
                if first_param != 0:
                    index = second_param
                    continue
            elif opcode == 6:
                if first_param == 0:
                    index = second_param
                    continue
            elif opcode == 7:
                self.program[third_idx] = 1 if first_param < second_param else 0
            elif opcode == 8:
                self.program[third_idx] = 1 if first_param == second_param else 0
            elif opcode == 9:
                relative_offset += first_param

            if opcode in [1, 2, 7, 8]:
                index += 4
            elif opcode in [3, 4, 9]:
                index += 2
            else:
                index += 3

    def add_input(self, new_input: int):
        self.inputs.put(new_input)

    def get_output(self):
        return self.outputs.get()

    @staticmethod
    def parse_operation(operation: str):
        return (
            int(operation[-2:]),
            int(operation[-3]),
            int(operation[-4]),
            int(operation[-5]),
        )
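GrowingList is not part of this excerpt; any list that auto-extends on out-of-range access will do. With the hypothetical stand-in below, the thread can be exercised with a small echo program (opcode 3 reads an input, 4 outputs it, 99 halts):

class GrowingList(list):
    # Hypothetical stand-in: pad with zeros whenever an index is out of range.
    def _grow(self, index):
        if index >= len(self):
            self.extend([0] * (index + 1 - len(self)))

    def __getitem__(self, index):
        self._grow(index)
        return super().__getitem__(index)

    def __setitem__(self, index, value):
        self._grow(index)
        super().__setitem__(index, value)


computer = Computer([3, 0, 4, 0, 99])   # echo: read one input, output it, halt
computer.start()
computer.add_input(123)
print(computer.get_output())            # -> 123
computer.join()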
Example #15
class Crawler(Worker):
    def __init__(self, scheduler):
        super().__init__()
        connect_to_database()
        self.__scheduler = scheduler
        self.__repos = SimpleQueue()
        self.__next_crawl = datetime.datetime.now()

    def post_repo(self, repo_id):
        self.__repos.put(repo_id)

    async def work(self):
        try:
            await self.__process_repos()
        except Exception as e:
            logger.error("Processing repos failed: %s", e)

    async def __process_repos(self):
        logger.info("Start crawling")
        loop = asyncio.get_running_loop()

        if not os.path.exists(data_dir):
            os.makedirs(data_dir, exist_ok=True)
            logger.info("Created directory '%s'", data_dir)

        new_commits = False
        with database.session_scope() as session:
            if datetime.datetime.now() >= self.__next_crawl:
                logger.info("Crawl all repos")
                repos = session.query(database.Repo).all()
                self.__next_crawl = datetime.datetime.now() + datetime.timedelta(
                    seconds=CRAWLER_PERIOD_SECONDS)
                self.reschedule_internally(CRAWLER_PERIOD_SECONDS)
            else:
                logger.info("Crawl manually triggered repos")
                repo_ids = [repo for repo in self.__get_repos()]
                repos = session.query(database.Repo).filter(
                    database.Repo.id.in_(repo_ids)).all()
            channels = session.query(database.Channel).all()
            for repo in repos:
                try:
                    work_dir = os.path.join(data_dir, str(repo.id))
                    controller = RepoController(work_dir)
                    if not controller.is_clone_of(repo.url):
                        logger.info("Create repo for URL '%s' in '%s'",
                                    repo.url, work_dir)
                        await loop.run_in_executor(None,
                                                   controller.create_new_repo,
                                                   repo.url)
                    logger.info("Setup SSH in '%s'", work_dir)
                    await loop.run_in_executor(None, controller.setup_ssh,
                                               repo.ecosystem.ssh_key,
                                               repo.ecosystem.known_hosts)
                    logger.info("Setup HTTP credentials in '%s'", work_dir)
                    credentials = [{
                        "url": c.url,
                        "username": c.username,
                        "password": c.password
                    } for c in repo.ecosystem.credentials]
                    await loop.run_in_executor(None, controller.setup_http,
                                               credentials)
                    logger.info("Fetch repo '%s' for URL '%s'", work_dir,
                                repo.url)
                    await loop.run_in_executor(None, controller.fetch)

                    branches = controller.get_remote_branches()
                    for channel in channels:
                        for branch in branches:
                            if not re.fullmatch(channel.branch, branch):
                                continue

                            logger.info("Branch '%s' matches '%s'", branch,
                                        channel.branch)
                            logger.info("Checkout branch '%s'", branch)
                            controller.checkout(branch)
                            sha = controller.get_sha()

                            commits = session.query(database.Commit).filter_by(
                                repo=repo, sha=sha, channel=channel)

                            # continue if this commit has already been stored
                            if list(commits):
                                logger.info("Commit '%s' exists", sha[:7])
                                continue

                            logger.info("Add commit '%s'", sha[:7])
                            commit = database.Commit()
                            commit.sha = sha
                            commit.message = controller.get_message()
                            commit.user_name = controller.get_user_name()
                            commit.user_email = controller.get_user_email()
                            commit.repo = repo
                            commit.channel = channel
                            commit.status = database.CommitStatus.new
                            session.add(commit)
                            new_commits = True

                            old_commits = session.query(
                                database.Commit).filter(
                                    database.Commit.repo == repo,
                                    database.Commit.channel == channel,
                                    database.Commit.sha != sha,
                                    database.Commit.status !=
                                    database.CommitStatus.old)
                            for c in old_commits:
                                logger.info("Set status of '%s' to 'old'",
                                            c.sha[:7])
                                c.status = database.CommitStatus.old
                except git.exc.GitError as e:
                    logger.error(
                        "Failed to process repo '%s' with message '%s'",
                        repo.url, e)

        if new_commits:
            logger.info("Finish crawling with *new* commits")
            logger.info('Trigger scheduler: process commits')
            try:
                self.__scheduler.process_commits()
            except (ApiException, MaxRetryError):
                logger.error("Failed to trigger scheduler")
        else:
            logger.info("Finish crawling with *no* new commits")

    def __get_repos(self):
        try:
            while True:
                yield self.__repos.get_nowait()
        except Empty:
            pass
Example #16
class IntcodeRunner(object):
    def __init__(self, program, break_on_output=False, debug=False):
        self.ip = 0
        self.relative_base = 0
        self.program = program + [0 for _ in range(1000)]
        self.input_buffer = SimpleQueue()
        self.output_buffer = SimpleQueue()
        self._halted = False
        self._waiting = False
        self.break_on_output = break_on_output
        self._debug = debug

    def add_input(self, input):
        self.input_buffer.put(input)
        self._waiting = False

    def get_output(self):
        if self.output_buffer.empty():
            return None
        return self.output_buffer.get()

    def got_output(self):
        return not self.output_buffer.empty()

    def program_has_output(self):
        return not self.output_buffer.empty()

    def halted(self):
        return self._halted

    def waiting(self):
        return self._waiting

    def run_program(self):
        if self._halted:
            return
        while True:
            inst = [int(x) for x in str(self.program[self.ip])]
            if len(inst) == 1:
                opcode = inst[0]
                mode = []
            else:
                opcode = int(''.join([str(x) for x in inst[-2:]]))
                mode = inst[:-2]
                mode.reverse()

            # print(f'Running opcode {opcode}')

            if opcode == 1:
                self._add(mode)
            elif opcode == 2:
                self._mul(mode)
            elif opcode == 3:
                if self.waiting() or not self._input(mode):
                    break
            elif opcode == 4:
                self._output(mode)
                if self.break_on_output:
                    break
            elif opcode == 5:
                self._jmp_true(mode)
            elif opcode == 6:
                self._jmp_false(mode)
            elif opcode == 7:
                self._lt(mode)
            elif opcode == 8:
                self._eq(mode)
            elif opcode == 9:
                self._adjust_rb(mode)
            elif opcode == 99:
                self._halted = True
                print('Halting!')
                break
            else:
                print(f'Unknown opcode.. {opcode}')
                sys.exit(1)

    def adjust_mode(self, mode_array, nr_param):
        if len(mode_array) == nr_param:
            return mode_array
        missing = [0 for _ in range(nr_param - len(mode_array))]
        return mode_array + missing

    def get_mode_value(self, mode, ip_offset):
        if mode == 0:
            # position mode
            return self.program[self.program[self.ip + ip_offset]]
        elif mode == 1:
            # immediate mode
            return self.program[self.ip + ip_offset]
        elif mode == 2:
            # relative mode
            return self.program[self.program[self.ip + ip_offset] +
                                self.relative_base]
        else:
            print(f'Got illegal mode! mode: {mode}')
            sys.exit(1)

    def expand_memory(self, nr):
        self.program = self.program + [0 for _ in range(nr * 2)]

    def write_mode_value(self, mode, ip_offset, value):
        if mode == 0:
            if self.program[self.ip + ip_offset] >= len(self.program):
                self.expand_memory(self.program[self.ip + ip_offset])

            self.program[self.program[self.ip + ip_offset]] = value
        elif mode == 1:
            print('Wrong write mode!')
            sys.exit(1)
        elif mode == 2:
            # relative mode: bounds-check the target *index*, not the value stored there
            target = self.relative_base + self.program[self.ip + ip_offset]
            if target >= len(self.program):
                self.expand_memory(target)

            self.program[target] = value
        else:
            print('Unknown write mode!')
            sys.exit(1)

    def _add(self, mode):
        mode = self.adjust_mode(mode, 3)
        param_1 = self.get_mode_value(mode[0], 1)
        param_2 = self.get_mode_value(mode[1], 2)
        self.write_mode_value(mode[2], 3, param_1 + param_2)
        self.ip += 4

    def _mul(self, mode):
        mode = self.adjust_mode(mode, 3)
        param_1 = self.get_mode_value(mode[0], 1)
        param_2 = self.get_mode_value(mode[1], 2)
        self.write_mode_value(mode[2], 3, param_1 * param_2)
        self.ip += 4

    def _input(self, mode):
        # If the input buffer is empty, pause and wait for more input instead of blocking.
        mode = self.adjust_mode(mode, 1)
        if self.input_buffer.empty():
            self._waiting = True
            return False
        self.write_mode_value(mode[0], 1, self.input_buffer.get_nowait())
        self.ip += 2
        self._waiting = False
        return True

    def _output(self, mode):
        mode = self.adjust_mode(mode, 1)
        output = self.get_mode_value(mode[0], 1)
        self.output_buffer.put(output)
        self.ip += 2

    def _jmp_true(self, mode):
        # Jump if true
        mode = self.adjust_mode(mode, 2)
        jmp = self.get_mode_value(mode[0], 1)
        if jmp != 0:
            self.ip = self.get_mode_value(mode[1], 2)
        else:
            self.ip += 3

    def _jmp_false(self, mode):
        # Jump if false
        mode = self.adjust_mode(mode, 2)
        jmp = self.get_mode_value(mode[0], 1)
        if jmp == 0:
            self.ip = self.get_mode_value(mode[1], 2)
        else:
            self.ip += 3

    def _lt(self, mode):
        # less than
        mode = self.adjust_mode(mode, 3)
        param_1 = self.get_mode_value(mode[0], 1)
        param_2 = self.get_mode_value(mode[1], 2)

        if param_1 < param_2:
            self.write_mode_value(mode[2], 3, 1)
        else:
            self.write_mode_value(mode[2], 3, 0)
        self.ip += 4

    def _eq(self, mode):
        # equals
        mode = self.adjust_mode(mode, 3)
        param_1 = self.get_mode_value(mode[0], 1)
        param_2 = self.get_mode_value(mode[1], 2)

        if param_1 == param_2:
            self.write_mode_value(mode[2], 3, 1)
        else:
            self.write_mode_value(mode[2], 3, 0)
        self.ip += 4

    def _adjust_rb(self, mode):
        """Update the relative base."""
        mode = self.adjust_mode(mode, 1)
        self.relative_base += self.get_mode_value(mode[0], 1)
        self.ip += 2
        # print(f'RB updated, new value: {self.relative_base}')

    @classmethod
    def read_program_from_file(cls, path):
        with open(path) as fp:
            program_input = [int(x) for x in fp.read().split(',') if x]
        return program_input
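
A hypothetical run of the IntcodeRunner above, using the tiny echo program [3, 0, 4, 0, 99] and an arbitrary input value; both are made up for illustration and are not part of the original project:

runner = IntcodeRunner([3, 0, 4, 0, 99])
runner.add_input(42)        # opcode 3 stores the input at position 0
runner.run_program()        # opcode 4 echoes it, opcode 99 halts
assert runner.halted()
print(runner.get_output())  # -> 42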
Ejemplo n.º 17
class StartModulesTask(Task):
    """A task for starting DBus modules.

    The timeout service_start_timeout from the Anaconda bus
    configuration file is applied by default when the DBus
    method StartServiceByName is called.
    """
    def __init__(self, message_bus, activatable, forbidden, optional):
        """Create a new task.

        Anaconda modules are specified by their full DBus name or a prefix
        of their DBus name that ends with '*'.

        :param message_bus: a message bus
        :param activatable: a list of modules that can be activated
        :param forbidden: a list of modules that are not allowed to run
        :param optional: a list of modules that are optional
        """
        super().__init__()
        self._message_bus = message_bus
        self._activatable = activatable
        self._forbidden = forbidden
        self._optional = optional
        self._module_observers = []
        self._callbacks = SimpleQueue()

    @property
    def name(self):
        """Name of the task."""
        return "Start the modules"

    def run(self):
        """Run the task.

        :return: a list of observers
        """
        # Collect the modules.
        self._module_observers = self._find_modules()

        # Asynchronously start the modules.
        self._start_modules(self._module_observers)

        # Process the callbacks of the asynchronous calls.
        self._process_callbacks(self._module_observers)

        return self._module_observers

    @staticmethod
    def _match_module(name, patterns):
        """Match a module with one of the specified patterns."""
        for pattern in patterns:
            # Match the name prefix.
            if pattern.endswith("*") and name.startswith(pattern[:-1]):
                return True

            # Match the full name.
            if name == pattern:
                return True

        return False

    def _find_modules(self):
        """Find modules to start."""
        modules = []

        dbus = self._message_bus.proxy
        names = dbus.ListActivatableNames()

        for service_name in names:
            # Only activatable modules can be started.
            if not self._match_module(service_name, self._activatable):
                continue

            # Forbidden modules are not allowed to run.
            if self._match_module(service_name, self._forbidden):
                log.debug(
                    "Skip %s. The module won't be started, because it's "
                    "marked as forbidden in the Anaconda configuration "
                    "files.", service_name)
                continue

            log.debug("Found %s.", service_name)
            modules.append(ModuleObserver(
                self._message_bus,
                service_name,
            ))

        return modules

    def _start_modules(self, module_observers):
        """Start the modules."""
        dbus = self._message_bus.proxy

        for observer in module_observers:
            log.debug("Starting %s.", observer)

            dbus.StartServiceByName(
                observer.service_name,
                DBUS_FLAG_NONE,
                callback=self._start_service_by_name_callback,
                callback_args=(observer, ))

    def _start_service_by_name_callback(self, call, observer):
        """Callback for the StartServiceByName method."""
        self._callbacks.put(
            (observer, partial(self._start_service_by_name_handler, call)))

    def _start_service_by_name_handler(self, call, observer):
        """Handler for the StartServiceByName method."""
        try:
            returned = call()
        except Exception as error:  # pylint: disable=broad-except
            raise UnavailableModuleError(
                "Service {} has failed to start: {}".format(observer,
                                                            error)) from error

        if returned != DBUS_START_REPLY_SUCCESS:
            log.warning("Service %s is already running.", observer)
        else:
            log.debug("Service %s started successfully.", observer)

        # Connect the observer once the service is available.
        observer.service_available.connect(self._service_available_callback)
        observer.connect_once_available()
        return False

    def _service_available_callback(self, observer):
        """Callback for the service_available signal."""
        self._callbacks.put((observer, self._service_available_handler))

    def _service_available_handler(self, observer):
        """Handler for the service_available signal."""
        log.debug("%s is available.", observer)
        observer.proxy.Ping()
        return True

    def _process_callbacks(self, module_observers):
        """Process callbacks of the asynchronous calls.

        Process callbacks of the asynchronous calls until all modules
        are processed. A callback returns True if the module is processed,
        otherwise False.

        If a DBus call fails with an error, we raise an exception in the
        callback and immediately quit the task unless it comes from an
        add-on. A failure of an add-on module is not fatal, we just remove
        its observer from the list of available modules and continue.

        :param module_observers: a list of module observers
        """
        available = module_observers
        unprocessed = set(module_observers)

        while unprocessed:
            # Call the next scheduled callback.
            observer, callback = self._callbacks.get()

            try:
                is_available = callback(observer)

                # The module is not processed yet.
                if not is_available:
                    continue

            except UnavailableModuleError:
                # The failure of a required module is fatal.
                if not self._match_module(observer.service_name,
                                          self._optional):
                    raise

                # The failure of an optional module is not fatal. Remove
                # it from the list of available modules and continue.
                log.debug(
                    "Skip %s. The optional module has failed to start, "
                    "so it won't be available during the installation.",
                    observer.service_name)
                available.remove(observer)

            # The module is processed.
            unprocessed.discard(observer)
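
A small illustration of the prefix matching described in the StartModulesTask constructor docstring; the DBus names and patterns below are hypothetical examples only:

patterns = [
    "org.fedoraproject.Anaconda.Modules.*",       # prefix pattern
    "org.fedoraproject.Anaconda.Addons.MyAddon",  # exact name
]
print(StartModulesTask._match_module("org.fedoraproject.Anaconda.Modules.Timezone", patterns))  # True
print(StartModulesTask._match_module("org.fedoraproject.Anaconda.Addons.MyAddon", patterns))    # True
print(StartModulesTask._match_module("org.fedoraproject.Anaconda.Addons.Other", patterns))      # False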
Ejemplo n.º 18
def computer(program, inputQ: queue.SimpleQueue, outputQ: queue.SimpleQueue):
    i = 0

    while i < len(program):
        instr = program[i]
        op = instr % 100
        modeP1 = instr // 100 % 10
        modeP2 = instr // 1000 % 10
        modeP3 = instr // 10000 % 10

        # Add or Multiply
        if op == 1 or op == 2:
            p1 = program[i + 1] if modeP1 else program[program[i + 1]]
            p2 = program[i + 2] if modeP2 else program[program[i + 2]]
            result = p1 + p2 if op == 1 else p1 * p2
            if modeP3:
                program[i + 3] = result
            else:
                program[program[i + 3]] = result
            i += 4
        # Input
        elif op == 3:
            inp = inputQ.get()

            if modeP1:
                program[i + 1] = inp
            else:
                program[program[i + 1]] = inp
            i += 2
        # Output
        elif op == 4:
            outp = program[i + 1] if modeP1 else program[program[i + 1]]
            outputQ.put(outp)
            i += 2
        # Jump-If-True
        elif op == 5:
            p1 = program[i + 1] if modeP1 else program[program[i + 1]]
            p2 = program[i + 2] if modeP2 else program[program[i + 2]]
            if p1:
                i = p2
            else:
                i += 3
        # Jump-If-False
        elif op == 6:
            p1 = program[i + 1] if modeP1 else program[program[i + 1]]
            p2 = program[i + 2] if modeP2 else program[program[i + 2]]
            if p1:
                i += 3
            else:
                i = p2
        # Less-Than
        elif op == 7:
            p1 = program[i + 1] if modeP1 else program[program[i + 1]]
            p2 = program[i + 2] if modeP2 else program[program[i + 2]]
            result = 1 if p1 < p2 else 0
            if modeP3:
                program[i + 3] = result
            else:
                program[program[i + 3]] = result
            i += 4
        # Equals
        elif op == 8:
            p1 = program[i + 1] if modeP1 else program[program[i + 1]]
            p2 = program[i + 2] if modeP2 else program[program[i + 2]]
            result = 1 if p1 == p2 else 0
            if modeP3:
                program[i + 3] = result
            else:
                program[program[i + 3]] = result
            i += 4
        # End-Of-Program
        elif op == 99:
            break
        # Illegal Instruction
        else:
            print("Error: unknown Instruction")
            print(
                f"Instr:{str(instr).zfill(5)} Op:{op} P1:{modeP1} P2:{modeP2} P3:{modeP3}"
            )
            return
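
A quick, self-contained run of the computer function above; the echo program [3, 0, 4, 0, 99] and the input value are illustrative:

import queue

in_q, out_q = queue.SimpleQueue(), queue.SimpleQueue()
in_q.put(7)
computer([3, 0, 4, 0, 99], in_q, out_q)  # reads 7, writes it back out, halts
print(out_q.get())  # -> 7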
Ejemplo n.º 19
class NetworkConnection(object):
    class State(object):
        kCreated = 0
        kInit = 1
        kHandshake = 2
        kSynchronized = 3
        kActive = 4
        kDead = 5

    def __init__(self, uid, stream, notifier, handshake, get_entry_type, verbose=False):

        # logging debugging
        self.m_verbose = verbose

        self.m_uid = uid
        self.m_stream = stream
        self.m_notifier = notifier
        self.m_handshake = handshake
        self.m_get_entry_type = get_entry_type

        self.m_active = False
        self.m_proto_rev = 0x0300
        self.state = self.State.kCreated
        self.m_state_mutex = threading.Lock()
        self.m_last_update = 0

        self.m_outgoing = Queue()

        self.m_process_incoming = None
        self.m_read_thread = None
        self.m_write_thread = None

        self.m_remote_id_mutex = threading.Lock()
        self.m_remote_id = None
        self.m_last_post = 0

        self.m_pending_mutex = threading.Lock()
        self.m_pending_outgoing = []
        self.m_pending_update = {}

        # Condition variables for shutdown
        self.m_shutdown_mutex = threading.Lock()
        # Not needed in python
        # self.m_read_shutdown_cv = threading.Condition()
        # self.m_write_shutdown_cv = threading.Condition()
        self.m_read_shutdown = False
        self.m_write_shutdown = False

        # turn off Nagle algorithm; we bundle packets for transmission
        try:
            self.m_stream.setNoDelay()
        except IOError as e:
            logger.warning("Setting TCP_NODELAY: %s", e)

    def start(self):
        if self.m_active:
            return

        self.m_active = True
        self.set_state(self.State.kInit)

        # clear queue
        try:
            while True:
                self.m_outgoing.get_nowait()
        except Empty:
            pass

        # reset shutdown flags
        with self.m_shutdown_mutex:
            self.m_read_shutdown = False
            self.m_write_shutdown = False

        # start threads
        self.m_write_thread = SafeThread(
            target=self._writeThreadMain, name="nt-net-write"
        )
        self.m_read_thread = SafeThread(target=self._readThreadMain, name="nt-net-read")

    def __repr__(self):
        try:
            return "<NetworkConnection 0x%x %s>" % (id(self), self.info())
        except Exception:
            return "<NetworkConnection 0x%x ???>" % id(self)

    def stop(self):
        logger.debug("NetworkConnection stopping (%s)", self)

        if not self.m_active:
            return

        self.set_state(self.State.kDead)
        self.m_active = False
        # closing the stream so the read thread terminates
        self.m_stream.close()

        # send an empty outgoing message set so the write thread terminates
        self.m_outgoing.put([])

        # wait for threads to terminate, timeout
        self.m_write_thread.join(1)
        if self.m_write_thread.is_alive():
            logger.warning("%s did not die", self.m_write_thread.name)

        self.m_read_thread.join(1)
        if self.m_read_thread.is_alive():
            logger.warning("%s did not die", self.m_write_thread.name)

        # clear queue
        try:
            while True:
                self.m_outgoing.get_nowait()
        except Empty:
            pass

    def get_proto_rev(self):
        return self.m_proto_rev

    def get_stream(self):
        return self.m_stream

    def info(self):
        return ConnectionInfo(
            self.remote_id(),
            self.m_stream.getPeerIP(),
            self.m_stream.getPeerPort(),
            self.m_last_update,
            self.m_proto_rev,
        )

    def is_connected(self):
        return self.state == self.State.kActive

    def last_update(self):
        return self.m_last_update

    def set_process_incoming(self, func):
        self.m_process_incoming = func

    def set_proto_rev(self, proto_rev):
        self.m_proto_rev = proto_rev

    def set_state(self, state):
        with self.m_state_mutex:
            State = self.State

            # Don't update state any more once we've died
            if self.state == State.kDead:
                return

            # One-shot notify state changes
            if self.state != State.kActive and state == State.kActive:
                info = self.info()
                self.m_notifier.notifyConnection(True, info)
                logger.info(
                    "CONNECTED %s port %s (%s)",
                    info.remote_ip,
                    info.remote_port,
                    info.remote_id,
                )
            elif self.state != State.kDead and state == State.kDead:
                info = self.info()
                self.m_notifier.notifyConnection(False, info)
                logger.info(
                    "DISCONNECTED %s port %s (%s)",
                    info.remote_ip,
                    info.remote_port,
                    info.remote_id,
                )

            if self.m_verbose:
                logger.debug(
                    "%s: %s -> %s", self, _state_map[self.state], _state_map[state]
                )

            self.state = state

    # python optimization: don't use getter here
    # def state(self):
    #     return self.m_state

    def remote_id(self):
        with self.m_remote_id_mutex:
            return self.m_remote_id

    def set_remote_id(self, remote_id):
        with self.m_remote_id_mutex:
            self.m_remote_id = remote_id

    def uid(self):
        return self.m_uid

    def _sendMessages(self, msgs):
        self.m_outgoing.put(msgs)

    def _readThreadMain(self):
        decoder = WireCodec(self.m_proto_rev)

        verbose = self.m_verbose

        def _getMessage():
            decoder.set_proto_rev(self.m_proto_rev)
            try:
                return Message.read(self.m_stream, decoder, self.m_get_entry_type)
            except IOError as e:
                logger.warning("read error in handshake: %s", e)

                # terminate connection on bad message
                self.m_stream.close()

                return None

        self.set_state(self.State.kHandshake)

        try:
            handshake_success = self.m_handshake(self, _getMessage, self._sendMessages)
        except Exception:
            logger.exception("Unhandled exception during handshake")
            handshake_success = False

        if not handshake_success:
            self.set_state(self.State.kDead)
            self.m_active = False
        else:
            self.set_state(self.State.kActive)

            try:
                while self.m_active:
                    if not self.m_stream:
                        break

                    decoder.set_proto_rev(self.m_proto_rev)

                    try:
                        msg = Message.read(
                            self.m_stream, decoder, self.m_get_entry_type
                        )
                    except Exception as e:
                        if not isinstance(e, StreamEOF):
                            if verbose:
                                logger.exception("read error")
                            else:
                                logger.warning("read error: %s", e)

                        # terminate connection on bad message
                        self.m_stream.close()

                        break

                    if verbose:
                        logger.debug(
                            "%s received type=%s with str=%s id=%s seq_num=%s value=%s",
                            self.m_stream.sock_type,
                            msgtype_str(msg.type),
                            msg.str,
                            msg.id,
                            msg.seq_num_uid,
                            msg.value,
                        )

                    self.m_last_update = monotonic()
                    self.m_process_incoming(msg, self)
            except IOError as e:
                # connection died probably
                logger.debug("IOError in read thread: %s", e)
            except Exception:
                logger.warning("Unhandled exception in read thread", exc_info=True)

            self.set_state(self.State.kDead)
            self.m_active = False

        # also kill write thread
        self.m_outgoing.put([])

        with self.m_shutdown_mutex:
            self.m_read_shutdown = True

    def _writeThreadMain(self):
        encoder = WireCodec(self.m_proto_rev)

        verbose = self.m_verbose
        out = []

        try:
            while self.m_active:
                msgs = self.m_outgoing.get()

                if verbose:
                    logger.debug("write thread woke up")
                    if msgs:
                        logger.debug(
                            "%s sending %s messages", self.m_stream.sock_type, len(msgs)
                        )

                if not msgs:
                    continue

                encoder.set_proto_rev(self.m_proto_rev)

                # python-optimization: checking verbose causes extra overhead
                if verbose:
                    for msg in msgs:
                        if msg:
                            logger.debug(
                                "%s sending type=%s with str=%s id=%s seq_num=%s value=%s",
                                self.m_stream.sock_type,
                                msgtype_str(msg.type),
                                msg.str,
                                msg.id,
                                msg.seq_num_uid,
                                msg.value,
                            )
                            Message.write(msg, out, encoder)
                else:
                    for msg in msgs:
                        if msg:
                            Message.write(msg, out, encoder)

                if not self.m_stream:
                    break

                if not out:
                    continue

                self.m_stream.send(b"".join(out))

                del out[:]

                # if verbose:
                #    logger.debug('send %s bytes', encoder.size())
        except IOError as e:
            # connection died probably
            if not isinstance(e, StreamEOF):
                logger.debug("IOError in write thread: %s", e)
        except Exception:
            logger.warning("Unhandled exception in write thread", exc_info=True)

        self.set_state(self.State.kDead)
        self.m_active = False
        self.m_stream.close()  # also kill read thread

        with self.m_shutdown_mutex:
            self.m_write_shutdown = True

    def queueOutgoing(self, msg):
        with self.m_pending_mutex:

            # Merge with previous.  One case we don't combine: delete/assign loop.
            msgtype = msg.type
            if msgtype in [kEntryAssign, kEntryUpdate]:

                # don't do this for unassigned id's
                msg_id = msg.id
                if msg_id == 0xFFFF:
                    self.m_pending_outgoing.append(msg)
                    return

                mpend = self.m_pending_update.get(msg_id)
                if mpend is not None and mpend.first != 0:
                    # overwrite the previous one for this id
                    oldidx = mpend.first - 1
                    oldmsg = self.m_pending_outgoing[oldidx]
                    if (
                        oldmsg
                        and oldmsg.type == kEntryAssign
                        and msgtype == kEntryUpdate
                    ):
                        # need to update assignment with seq_num and value
                        oldmsg = Message.entryAssign(
                            oldmsg.str, msg_id, msg.seq_num_uid, msg.value, oldmsg.flags
                        )

                    else:
                        oldmsg = msg  # easy update

                    self.m_pending_outgoing[oldidx] = oldmsg

                else:
                    # new, remember it
                    pos = len(self.m_pending_outgoing)
                    self.m_pending_outgoing.append(msg)
                    self.m_pending_update[msg_id] = Pair(pos + 1, 0)

            elif msgtype == kEntryDelete:
                # don't do this for unassigned id's
                msg_id = msg.id
                if msg_id == 0xFFFF:
                    self.m_pending_outgoing.append(msg)
                    return

                # clear previous updates
                mpend = self.m_pending_update.get(msg_id)
                if mpend is not None:
                    if mpend.first != 0:
                        self.m_pending_outgoing[mpend.first - 1] = None

                    if mpend.second != 0:
                        self.m_pending_outgoing[mpend.second - 1] = None

                    self.m_pending_update[msg_id] = _empty_pair

                # add deletion
                self.m_pending_outgoing.append(msg)

            elif msgtype == kFlagsUpdate:
                # don't do this for unassigned id's
                msg_id = msg.id
                if msg_id == 0xFFFF:
                    self.m_pending_outgoing.append(msg)
                    return

                mpend = self.m_pending_update.get(msg_id)
                if mpend is not None and mpend.second != 0:
                    # overwrite the previous one for this id
                    self.m_pending_outgoing[mpend.second - 1] = msg

                else:
                    # new, remember it
                    pos = len(self.m_pending_outgoing)
                    self.m_pending_outgoing.append(msg)
                    self.m_pending_update[msg_id] = Pair(0, pos + 1)

            elif msgtype == kClearEntries:
                # knock out all previous assigns/updates!
                for i, m in enumerate(self.m_pending_outgoing):
                    if not m:
                        continue

                    t = m.type
                    if t in [
                        kEntryAssign,
                        kEntryUpdate,
                        kFlagsUpdate,
                        kEntryDelete,
                        kClearEntries,
                    ]:
                        self.m_pending_outgoing[i] = None

                self.m_pending_update.clear()
                self.m_pending_outgoing.append(msg)

            else:
                self.m_pending_outgoing.append(msg)

    def postOutgoing(self, keep_alive):
        with self.m_pending_mutex:
            # optimization: don't call monotonic unless needed
            # now = monotonic()
            if not self.m_pending_outgoing:
                if not keep_alive:
                    return

                # send keep-alives once a second (if no other messages have been sent)
                now = monotonic()
                if (now - self.m_last_post) < 1.0:
                    return

                self.m_outgoing.put((Message.keepAlive(),))

            else:
                now = monotonic()
                self.m_outgoing.put(self.m_pending_outgoing)

                self.m_pending_outgoing = []
                self.m_pending_update.clear()

            self.m_last_post = now
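
The bookkeeping in queueOutgoing above coalesces repeated updates for the same entry id. A stripped-down sketch of that idea (plain tuples stand in for the real Message objects; the names are invented for illustration):

pending = []       # outgoing messages, in order
slot_for_id = {}   # entry id -> 1-based index into pending

def queue_update(entry_id, value):
    idx = slot_for_id.get(entry_id, 0)
    if idx:
        pending[idx - 1] = ("update", entry_id, value)  # overwrite the pending slot
    else:
        pending.append(("update", entry_id, value))
        slot_for_id[entry_id] = len(pending)

queue_update(5, "a")
queue_update(5, "b")   # coalesced with the previous update for id 5
queue_update(9, "c")
print(pending)         # -> [('update', 5, 'b'), ('update', 9, 'c')]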
Ejemplo n.º 20
class InterBridge:
    publishers = [
        'thermal',
        'controlsState',
        'model',
        'health',
        'carState',
        'carControl',
        'plan',
        'liveLocation',
        'liveMpc',
        'liveLongitudinalMpc',
        'driverState',
        'liveParameters',
        'pathPlan',
        'carParams',
        'dMonitoringState',
        'testJoystick',
    ]

    def __init__(self, sm=None, pm=None, can_sock=None):
        # Initialize received messages queue
        self.msgs_queue = Queue()

        # Setup sockets
        self.pm = pm
        if self.pm is None:
            self.pm = messaging.PubMaster(['testJoystick'])

        self.sm = sm
        if self.sm is None:
            self.sm = messaging.SubMaster(self.publishers)

        self.rk = Ratekeeper(RATE, print_delay_threshold=None)

    def sock_msg_received(self, client, server, msg):
        self.msgs_queue.put(msg)

    def sock_msg_send(self, msg):
        pass

    def step(self):
        # Send msg from ZMQ to Socket, only if there are connected clients
        if self.count_clients():
            self.sm.update(0)
            send_msg = {}
            for publisher in self.publishers:
                if self.sm.updated[publisher]:
                    send_msg[publisher] = self.sm[publisher].to_dict()
                    send_msg[publisher]['logMonoTime'] = self.sm.logMonoTime[
                        publisher]
                    # Hack, convert known bytes value to hex (bytes are not serializable)
                    if publisher == 'carParams' and send_msg[publisher]['carFw']:
                        for idx, val in enumerate(send_msg[publisher]['carFw']):
                            send_msg[publisher]['carFw'][idx]['fwVersion'] = val['fwVersion'].hex()

            if send_msg:
                self.sock_msg_send(send_msg)

        # Send msg from Socket to ZMQ (only testJoystick!)
        while not self.msgs_queue.empty():
            msg = self.msgs_queue.get()

            if 'opEdit' in msg:
                if 'loadRequest' in msg['opEdit']:
                    try:
                        with open(OP_PARAMS_PATH, 'r') as file:
                            data = file.read()
                            self.sock_msg_send({'opEdit': json.loads(data)})
                    except (FileNotFoundError, PermissionError):
                        self.sock_msg_send(
                            {'error': "File op_params.json not found."})
                else:
                    try:
                        with open(OP_PARAMS_PATH, 'w') as file:
                            file.write(json.dumps(msg['opEdit'], indent=2))
                    except PermissionError:
                        self.sock_msg_send({
                            'error':
                            "Can't write op_params.json, not enough permissions."
                        })

            elif 'testJoystick' in msg:
                dat = messaging.new_message('testJoystick')
                testJoystick = dat.testJoystick
                testJoystick.axes = msg['testJoystick']['axes']
                testJoystick.buttons = msg['testJoystick']['buttons']
                testJoystick.enabled = msg['testJoystick']['enabled']
                testJoystick.axesMode = msg['testJoystick']['axesMode']
                self.pm.send('testJoystick', dat)

    def interbridged_thread(self, count_callback):
        self.count_clients = count_callback
        while True:
            self.step()
            self.rk.keep_time()
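
InterBridge hands messages from the socket callback thread to the main loop through a plain Queue. A minimal standalone sketch of that handoff (the dummy message below stands in for a real client payload):

from queue import Queue

msgs = Queue()

def sock_msg_received(msg):   # called from the socket server thread
    msgs.put(msg)

def step():                   # called from the main loop
    while not msgs.empty():
        print("handling", msgs.get())

sock_msg_received({"testJoystick": {"axes": [0.0, 0.0], "buttons": [], "enabled": False}})
step()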
Ejemplo n.º 21
File: app.py Project: dgomes/cusca
class Camera(object):
    def __init__(self, url, callback=None):
        self.engine = detect_image.Engine(MODEL_FILE, LABELS_FILE)
        self.rtsp_url = url
        self.frames = SimpleQueue()
        self.callback = callback
        self._event = False
        self.event_detected = False

        self.configuration = {
            CONF_ARMED: True,
            CONF_MJPEG_FPS: FPS,
            CONF_PF: PF,
            CONF_THRESHOLD: detect_image.TF_THRESHOLD,
            CONF_EVENT_BUFFER: BUFFER_SIZE,
        }

        self.set_buffer()

    def set_buffer(self, buffer_size=BUFFER_SIZE):
        self.current_event = deque(maxlen=buffer_size)
        self.last_events = deque(maxlen=buffer_size)
        self.cycle = deque(maxlen=buffer_size)

    @property
    def event_detected(self):
        return self._event

    @event_detected.setter
    def event_detected(self, val):
        if self.callback and val != self._event:
            self.callback("event_detected", val)
        self._event = val

    def capture_frames(self):
        # This thread captures frames exclusively
        container = av.open(self.rtsp_url)

        # Experiments to improve CPU performance
        #container.streams.video[0].thread_type = 'AUTO'
        #container.streams.video[0].codec_context.skip_frame = 'NONKEY'

        #Init screen
        self.last_events.append(next(container.decode(video=0)).to_image())
        self.cycle = self.last_events.copy()

        fc = 0
        for frame in container.decode(video=0):
            fc = (fc + 1) % (1 / PF)
            if fc == 0:
                self.frames.put(frame)

    def process_frames(self):
        # This thread processes frames and can handle other tasks
        while True:
            frame = self.frames.get()
            img = frame.to_image()
            if self.configuration[CONF_ARMED]:
                d_img, prob = self.engine.detect_image(
                    img, threshold=self.configuration[CONF_THRESHOLD])
            else:
                d_img, prob = img, 0

            if d_img:
                self.event_detected = True
                self.current_event.append(d_img)
                if prob > 0:
                    self.callback("event_probability", prob)
            else:
                self.event_detected = False

            if (not self.event_detected and len(self.current_event)) or \
                    len(self.current_event) == self.current_event.maxlen:

                self.last_events.extend(self.current_event)
                self.cycle = self.last_events.copy()  #copy which we rotate
                self.current_event.clear()

    def get_frame(self):
        frame = self.cycle[0]
        self.cycle.rotate(-1)
        return frame

    def last_frame(self):
        frame = self.last_events[-1]
        return frame
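
Camera splits decoding and inference across two threads joined by a SimpleQueue. A hypothetical wiring, assuming MODEL_FILE, LABELS_FILE and the RTSP URL below point at a working setup:

import threading

cam = Camera("rtsp://user:pass@camera.local/stream")  # made-up URL
threading.Thread(target=cam.capture_frames, daemon=True).start()  # producer
threading.Thread(target=cam.process_frames, daemon=True).start()  # consumer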
Ejemplo n.º 22
class XBeeModuleSim:
    def __init__(self, gs_address: bytes):
        """
        :brief: Constructor.
        In addition to constructing this, if any useful work is to be done
        then the rocket_callback and ground_callback attributes should be
        set - by default they are simply no-op functions.
        """
        assert len(gs_address) == 8
        self.gs_address = gs_address

        self._frame_parsers = {
            FrameType.TX_REQUEST: self._parse_tx_request_frame,
        }

        # Each element in each queue is a bytearray.
        self._rocket_rx_queue_packed = SimpleQueue()  # RX from RKT

        self._rocket_rx_queue = self._unpack(self._rocket_rx_queue_packed)

        # Callbacks should be of form callback(data), where data is a bytearray.
        # Making a no-op callback to prevent need for checking if callback exists.
        def nop_callback(data):
            return

        self.rocket_callback = nop_callback
        # Is called whenever data needs to be sent to the rocket through SIM - this should be the SIM send()
        # function. The callback should be thread safe.

        self.ground_callback = nop_callback
        # Is called whenever data received from the rocket needs to be sent to the rest of the ground station code.
        # The callback should be thread safe.

        self._shutdown_lock = RLock()
        self._is_shutting_down = False

        # Queues are IO bound.
        self._rocket_rx_thread = Thread(target=self._run_rocket_rx,
                                        name="xbee_sim_rocket_rx",
                                        daemon=True)

        self._rocket_rx_thread.start()

    def send_to_rocket(self, data):
        """
        :brief: Queue data to send to rocket following the XBee protocol.
        :param data: bytearray of data.
        """
        reserved = b"\xff\xfe"
        rx_options = b"\x02"
        self.rocket_callback(
            self._create_frame(
                FrameType.RX_INDICATOR,
                SOURCE_ADDRESS + reserved + rx_options + data,
            ))

        SENT_TO_ROCKET_EVENT.increment()

    def recieved_from_rocket(self, data):
        """
        :brief: All data incoming from the rocket should be passed into this method for processing.
        :param data: bytearray of data received.
        """
        self._rocket_rx_queue_packed.put(data)

    def _unpack(self, q):
        """
        :brief: Helper generator that unpacks the iterables in a given queue.
        :param q: SimpleQueue of iterables.
        :return: Yields the elements of each iterable in q, in order.
        """
        while True:
            arr = q.get()

            if arr is None:  # Probably a shutdown signal
                with self._shutdown_lock:
                    if self._is_shutting_down:
                        raise ShuttingDown()
                continue  # a bare None that isn't a shutdown; skip it

            for i in arr:
                yield i

    def _run_rocket_rx(self) -> None:
        """
        :brief: Process the incoming rocket data queue.
        This is the top level function, and handles any unescaped start delimiters.
        """
        LOGGER.debug(f"Xbee sim thread started")

        while True:
            try:
                start = next(self._rocket_rx_queue)
                assert start == START_DELIMITER
                self._parse_API_frame()
            except UnescapedDelimiterError:
                LOGGER.warning("Caught UnescapedDelimiterError exception")
                continue  # drop it and try again
            except ShuttingDown:
                break

        LOGGER.warning("Xbee sim thread shut down")

    def shutdown(self):
        with self._shutdown_lock:
            self._is_shutting_down = True

        self._rocket_rx_queue_packed.put(None)  # Wake up thread

        self._rocket_rx_thread.join()

    # Each frame parser gets iterator to data and the length (as given by the XBee frame standard).
    # Note that since the length as given by the XBee standard includes the frame type, but the frame
    # type is not passed to each frame parser, parsers should take in length - 1 bytes. Data iterator
    # may throw StopIteration; do not catch this.
    def _parse_tx_request_frame(self, data, frame_len) -> None:
        """
        :brief: Parses a TX Request frame, and passes a TX Status packet to the rocket.
        :param data: Iterable
        :param frame_len: length as defined in XBee protocol
        """
        calculated_checksum = FrameType.TX_REQUEST.value  # Checksum includes frame type

        frame_id = next(data)
        calculated_checksum += frame_id

        destination_address = bytearray()
        for _ in range(8):  # 64 bit destination address
            b = next(data)
            destination_address.append(b)
            calculated_checksum += b

        # Reserved 2 bytes. But in one case it's labelled as network address?
        network_addr_msb = next(data)
        calculated_checksum += network_addr_msb

        network_addr_lsb = next(data)
        calculated_checksum += network_addr_lsb

        broadcast_radius = next(data)  # Broadcast radius - not used
        calculated_checksum += broadcast_radius

        transmit_options = next(data)
        calculated_checksum += transmit_options

        payload = bytearray()
        for _ in range(frame_len - 14):
            b = next(data)
            payload.append(b)
            calculated_checksum += b

        received_checksum = next(data)
        calculated_checksum = 0xFF - (calculated_checksum & 0xFF)  # As per XBee's spec

        if received_checksum != calculated_checksum:
            raise ChecksumMismatchError()

        if (destination_address == bytearray(self.gs_address)
                or destination_address == bytearray(XBEE_BROADCAST_ADDRESS)):
            self.ground_callback(payload)
        else:
            LOGGER.warning(
                f"Discarding tx request frame with destination address other than GS ({destination_address.hex()})"
            )

        # Send acknowledge
        status_payload = bytearray(
            (frame_id, network_addr_msb, network_addr_lsb, 0, 0, 0))
        self.rocket_callback(
            self._create_frame(FrameType.TX_STATUS, status_payload))

    def _parse_API_frame(self) -> None:
        """Parses one XBee API frame based on the rocket_rx_queue."""
        def unescape(q):
            """
            :brief: Undoes the escaping in the XBee protocol standard
            :param q: Unpacked queue (with escaped characters)
            """
            for char in q:
                if char == START_DELIMITER:
                    raise UnescapedDelimiterError
                elif char == ESCAPE_CHAR:
                    char = next(q) ^ ESCAPE_XOR
                yield char

        unescaped = unescape(self._rocket_rx_queue)
        frame_len = next(unescaped) << 8
        frame_len += next(unescaped)
        frame_type = next(unescaped)
        assert frame_type in self._frame_parsers
        self._frame_parsers[frame_type](unescaped, frame_len)

        FRAME_PARSED_EVENT.increment()

    def _escape(self, unescaped) -> bytearray:
        """
        :param unescaped: Data to be escaped.
        :type unescaped: iterable of byte (e.g. bytes, bytearray)
        :return: Escaped data.
        :rtype: bytearray
        """
        escaped = bytearray()
        for b in unescaped:
            if b in NEEDS_ESCAPING:
                escaped.append(ESCAPE_CHAR)
                escaped.append(b ^ ESCAPE_XOR)
            else:
                escaped.append(b)
        return escaped

    def _create_frame(self, frame_type, payload):
        """
        Creates a frame.
        :param frame_type: int specifying the type of the frame
        :param payload: bytearray containing frame data.
        :return: bytearray containing the full frame.
        """
        frame_len = len(payload) + 1
        assert frame_len < (1 << 16)  # i.e. can be stored in 2 bytes
        len_lsb = frame_len & 0xFF
        len_msb = frame_len >> 8

        checksum = 0xFF - ((frame_type + sum(payload)) & 0xFF)
        unescaped = (bytearray((len_msb, len_lsb, frame_type))
                     + payload
                     + bytearray((checksum,)))

        return bytes((START_DELIMITER, )) + self._escape(unescaped)
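
A hypothetical end-to-end exercise of XBeeModuleSim above, assuming the module-level constants it relies on (SOURCE_ADDRESS, START_DELIMITER and the event counters) are available; the ground-station address and payload are made up:

sim = XBeeModuleSim(b"\x00\x13\xa2\x00\x01\x02\x03\x04")  # 8-byte address, invented

sim.rocket_callback = lambda frame: print("to rocket:", frame.hex())
sim.ground_callback = lambda payload: print("to ground:", payload.hex())

sim.send_to_rocket(b"hello rocket")  # wraps the payload in an RX_INDICATOR frame
sim.shutdown()                       # stops the rocket_rx thread cleanly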