def init(cls) -> None:
    # NOTE(review): presumably decorated as a @classmethod — the decorator is not
    # visible in this chunk; confirm at the definition site.
    """One-time startup: hook power management, schedule the power-save check and start BLE."""
    PowerMgmt.register_management_change_callback(cls._set_power_save_timeouts)
    cls._set_power_save_timeouts(PowerMgmt.get_plan()) # to be set defaults
    Planner.plan(cls._check_time_to_power_save, True)
    Logging.add_logger(BleLogger())
    cls._start_ble() #to be allocated big blocks in the beginning it should prevent memory fragmentation
def __init__(self, ip, port):
    """Prepare the server state; the socket is bound later by listen()."""
    self.logging = Logging()
    self.host, self.port = ip, port
    self.max_connections = 20
    self.clients = []
    self.server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
class Resource(object):
    """A shared file described by a JSON 'torrent' metainfo document, split into pieces."""

    def __init__(self, torrent_path, isSeeder = False):
        self.logging = Logging()
        self.torrent_path = torrent_path
        torrent = self.parse_metainfo(torrent_path)
        self.resource_id = torrent["info"]["name"]
        # NOTE(review): this attribute shadows the len() method below.
        self.len = torrent["info"]["length"]
        self.max_piece_size = torrent["info"]["piece length"]
        self.sha_pieces = torrent["info"]["pieces"].replace("<hex>", "").replace("</hex>", "").split(' ')
        self.isSeeder = isSeeder
        self.file_path = "./files/"
        self.pieces = []
        self._create_pieces()  # creates the file's pieces

    def len(self):
        # BUG(review): dead code — the instance attribute self.len assigned in __init__
        # shadows this method, so r.len() raises TypeError. Kept for interface
        # compatibility; renaming either side would change the public interface.
        return self.len

    def name(self):
        return self.resource_id

    def _create_pieces(self):
        """Split the local file into Piece objects of at most max_piece_size bytes."""
        if self.isSeeder:
            with open(self.file_path + self.resource_id, 'rb') as f:
                idx = 0
                while True:
                    b = f.read(self.max_piece_size)
                    if not b:
                        break
                    self.pieces.append(Piece(b, idx, self.resource_id, self.sha_pieces))
                    idx += 1
        else:
            # need to create the file if we do not have it and write into it...
            pass

    def get_piece(self, index):
        return self.pieces[index]

    def sha1_hashes(self):
        # NOTE(review): sha_pieces holds the strings split from the metainfo, and str
        # has no gethash() — this looks like it was meant to iterate self.pieces.
        # Left as-is to avoid guessing at intent; confirm against callers.
        hashes = []
        for p in self.sha_pieces:
            hashes.append(p.gethash())
        return hashes

    def parse_metainfo(self, file_path):
        """Load and parse the JSON metainfo file; returns None on failure."""
        try:
            # FIX: use a context manager so the file handle is closed even when
            # json parsing raises (the original leaked it on the error paths).
            with open(file_path, 'r') as torrent:
                return json.loads(torrent.read())
        except FileNotFoundError as e:
            self.logging.log("resource.py -> parse_metainfo", "could not find torrent file", 3, str(e))
            print("Could not find torrent file")
        except Exception as e:
            print("could not parse json: " + str(e))
class Bot(object): ping_pattern = re.compile('^PING (?P<payload>.*)') chanmsg_pattern = re.compile(':(?P<nick>.*?)!\S+\s+?PRIVMSG\s+#(?P<channel>[-\w]+)\s+:(?P<message>[^\n\r]+)') def __init__(self, server, ident, channel, path): self._dispatch_table = ( (self.ping_pattern, self.handle_ping), (self.chanmsg_pattern, self.handle_chanmsg)) self._logger = Logging(path) self.server = server self.ident = ident self.channel = channel self.start() def start(self): self._connection = Connection(self.server) self.register_connection(self.ident) self.join_channel(self.channel) def loop(self): while True: try: line = self._connection.read() except socket.error as se: trackeback.print_exc() print "Caught exception. Will reconnect." del(self._connection) time.sleep(60) self.start() continue for pattern, handler in self._dispatch_table: match = pattern.match(line) if match: handler(**match.groupdict()) def handle_ping(self, payload): self._connection.send("PONG " + payload) def handle_chanmsg(self, nick, channel, message): self._logger.write(nick + ": " + message) def register_connection(self, ident): nick, passw = ident self._connection.send("PASS " + passw) self._connection.send("NICK " + nick) self._connection.send("USER " + nick + " 0 * :" + nick) def join_channel(self, channel): chan, passw = channel self._connection.send("JOIN " + chan + " " + passw)
def __init__(self, torrent_path, isSeeder = False):
    """Parse the metainfo at torrent_path and prepare this resource's pieces."""
    self.logging = Logging()
    self.torrent_path = torrent_path
    meta = self.parse_metainfo(torrent_path)
    info = meta["info"]
    self.resource_id = info["name"]
    self.len = info["length"]
    self.max_piece_size = info["piece length"]
    stripped = info["pieces"].replace("<hex>", "").replace("</hex>", "")
    self.sha_pieces = stripped.split(' ')
    self.isSeeder = isSeeder
    self.file_path = "./files/"
    self.pieces = []
    self._create_pieces()  # build the piece objects for seeded files
class Main:
    """Read (degree, power) samples from the master logger forever and print them."""

    def __init__(self):
        self.LOGGING = Logging(MASTER_ADDR)

    def run(self):
        self.LOGGING.run()
        while True:
            try:
                # "Hf": unsigned short degree followed by a float power value
                degree, power = unpack("Hf", self.LOGGING.read())
                print("Degree:", degree, "Power:", power)
            except KeyboardInterrupt:
                exit()
            except Exception as e:
                print("Exception:", e)
def __init__(self, max_upload_rate, max_download_rate):
    """Peer constructor: acts as both a Server (listener thread) and a Client."""
    Server.__init__(self, '127.0.0.1', self.PORT) # inherites methods from Server class COULD BE WRONG TO SET LISTENING IP TO 0.0.0.0
    # BUG fixed: the listener thread was created but never started, so the peer
    # never accepted incoming connections.
    threading.Thread(target=self.listen, args=()).start()
    Client.__init__(self) # inherites methods from Client class
    self.status = self.PEER
    self.chocked = False
    self.interested = False
    self.max_download_rate = max_download_rate
    self.max_upload_rate = max_upload_rate
    self.logging = Logging()
    self.swarm_clients = []
    self.myIp = "x.x.x.x"
    self.mySocketId = "00000"
def __init__(self, server, ident, channel, path):
    """Store the connection parameters, build the dispatch table, then connect."""
    self._logger = Logging(path)
    self.server = server
    self.ident = ident
    self.channel = channel
    self._dispatch_table = (
        (self.ping_pattern, self.handle_ping),
        (self.chanmsg_pattern, self.handle_chanmsg),
    )
    self.start()
def __init__(self, address_driver_front, address_driver_rear, addr_power=None, power_measurement_period=1):
    """Assemble the chassis: motion-state defaults plus power and motor-driver blocks.

    @param address_driver_front: i2c address of the front motor driver block
    @param address_driver_rear: i2c address of the rear motor driver block
    @param addr_power: i2c address of the power block (None presumably selects a default — confirm in PowerBlock)
    @param power_measurement_period: period passed to PowerBlock for its measurements
    """
    self.speed = Speed.stop
    self.manoeuver = Manoeuver.straight
    self.direction = Direction.forward
    self.logging = Logging("chassis")
    self.power = PowerBlock(addr_power, power_measurement_period)
    self.front_driver = MotorDriverBlock(address_driver_front)
    self.rear_driver = MotorDriverBlock(address_driver_rear)
def __init__(self, block_type: int, address: int):
    """Probe the block at the given i2c address and validate its reported type."""
    self.type_id = block_type.id
    # default block i2c address is equal to its block type
    self.address = address if address else block_type.id
    self.logging = Logging(block_type.name)
    self.block_version = self._get_block_version()
    self.block_type_valid = False
    self.power_save_level = PowerSaveLevel.NoPowerSave
    # wake up block functionality
    self._tiny_write_base_id(_power_save_command, self.power_save_level.to_bytes(1, 'big'), True)
    version = self.block_version
    if not version:
        self.logging.warning("module with address 0x%x is not available", self.address)
    elif version[0] != self.type_id:
        self.logging.error(
            "unexpected block type. expected: %d, returned: %d", self.type_id, version[0])
    else:
        self.block_type_valid = True
def main(): parser = argparse.ArgumentParser() try: parser.add_argument("--command-log", "-c", help="Command log", default="commands.txt") parser.add_argument("--output-log", "-o", help="Enable output logging", action="store_true") parser.add_argument("--output-log-dir", "-d", help="Output log directory") parser.add_argument("--interval", "-i", help="Interval between command execution") parser.add_argument( "--execute", "-e", help="List of commands to execute within intervals") parser.set_defaults(output_log=False, interval=0) args = parser.parse_args() utility = Utility() logging = Logging(args.command_log, args.output_log) logger_shell = LoggerShell(utility, logging, args.interval) if args.execute: print logger_shell.onecmd('load' + args.execute) logger_shell.set_prompt_name(utility.clean_date_stamp()) logger_shell.cmdloop() except ExitShellException: pass
# Copyright (c) 2022 Jakub Vesely # This software is published under MIT license. Full text of the license is available at https://opensource.org/licenses/MIT from logging import Logging from power_mgmt import PowerMgmt import sys import time import uasyncio import gc import micropython logging = Logging("planner") unhandled_exception_prefix = "Unhandled exception" class TaskProperties: kill = False waiting_start_ms = 0 waiting_time_ms = 0 class Planner: _performed_tasks = { } #expected dict with task handle as key and tasks properties as value _handle_count = 0 _loop = uasyncio.get_event_loop() _power_mgmt = PowerMgmt() @classmethod def _get_next_handle(cls): handle = cls._handle_count
def __init__(self):
    """Create the client's TCP socket and its logger."""
    self.client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    self.logging = Logging()
class Client(object):
    """Pickle-over-TCP client: connect, send/receive Python objects, close."""

    def __init__(self):
        self.client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.logging = Logging()

    def connect(self, host_ip, port):
        """Connect to host_ip:port, logging success or failure."""
        try:
            self.client_socket.connect((host_ip, port))
            self.logging.log(
                "client.py -> connect",
                "connected to host: " + str(host_ip) + ":" + str(port))
        except socket.error as err:
            self.logging.log(
                "client.py -> connect",
                "failed to connect to host on: " + str(host_ip) + ":" + str(port), 2, str(err))

    def send(self, data):
        """Pickle and send an arbitrary Python object."""
        try:
            serialized = pickle.dumps(data)
            self.client_socket.send(serialized)
            self.logging.log("client.py -> send", "data sent" + str(data))
        except socket.error as err:
            self.logging.log("client.py -> send", "could not send to socket", 2, str(err))

    def receive(self, memory_allocation_size):
        """Receive up to memory_allocation_size bytes and unpickle them; None on failure.

        SECURITY: pickle.loads on bytes from the network executes arbitrary code if
        the peer is malicious — acceptable only on a trusted network.
        """
        try:
            serialized_data = self.client_socket.recv(memory_allocation_size)
            deserialized = pickle.loads(serialized_data)
            self.logging.log("client.py -> receive", "received data: " + str(deserialized))
            return deserialized
        except socket.error as err:
            print("socket recv failed with error %s" % err)
            # BUG fixed: the log tag and message were swapped ("client.py -> ", "receive")
            self.logging.log("client.py -> receive", "socket recv failed", 2, str(err))
        except pickle.UnpicklingError as err:
            self.logging.log("client.py -> receive", "unpickling error!", 2, str(err))
        except Exception as e:
            self.logging.log(
                "client.py -> receive",
                "when trying to receive data something bad happened... [", 3, str(e) + "]")
        return None

    def close(self):
        """Log and close the underlying socket."""
        self.logging.log("client.py -> close", " socket closed!")
        self.client_socket.close()
tab = "tab" shift = "shift" right_shift = "right_shift" insert = "insert" pause = "pause" class KeyCallback: def __init__(self, trigger, callback_type, *args, **kwargs) -> None: self.trigger = trigger self.callback_type = callback_type self.args = args self.kwargs = kwargs logging = Logging("vk") class VirtualKeyboard(): callbacks = [] @classmethod def process_input(cls, key_name: str = None, scan_code: bytes = None): logging.info("key:%s", key_name) for callback in cls.callbacks: if callback.trigger in (key_name, scan_code): Planner.plan(callback.callback_type, *callback.args, **callback.kwargs) @classmethod def add_callback(cls, trigger: callable, callback_type, *args, **kwargs):
def __init__(self) -> None:
    """Shell state: current file-transfer target and the directory-listing cursor."""
    self.file_path = None    # path of the file currently being handled
    self.new_file = False    # True when the next append should create/truncate the file
    self.dir_content = None  # cached directory listing, filled lazily
    self.dir_pos = 0         # cursor into dir_content
    self.logging = Logging("Shell")
class Server(object):
    """Threaded pickle-over-TCP server; subclasses override handle_connection()."""

    def __init__(self, ip, port):
        self.logging = Logging()
        self.host = ip
        self.port = port
        self.max_connections = 20
        self.server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.clients = []

    def listen(self):
        """Bind, listen and accept clients until the user terminates the server."""
        print("Listening On " + self.host + ":" + str(self.port))
        print("Waiting For Connections...")
        self.logging.log("server.py -> listen", "Listening On " + self.host + ":" + str(self.port))
        # NOTE(review): threading._start_new_thread is a private API; threads it starts
        # die abruptly on interpreter exit. Kept to preserve shutdown behavior.
        threading._start_new_thread(self.wait_for_termination, ())
        try:
            self.server.bind((self.host, self.port))
            self.server.listen(self.max_connections)
            self.accept()
        except socket.error as e:
            print("server failed with error: " + str(e))
            self.logging.log("server.py -> listen", "server failed with error: ", 2, str(e))

    def wait_for_termination(self):
        # forced server shutdown
        while True:
            user_input = input("enter 'quit' to terminate server safely...\n")
            if user_input == 'quit':
                self.logging.log("server.py -> wait_for_termination", "server terminated by user")
                self.server.close()  # close server
                break

    def accept(self):
        """Accept clients forever, handing each one to its own thread."""
        try:
            while True:
                conn, addr = self.server.accept()  # Note: addr[0] is client IP, addr[1] is socket id
                threading._start_new_thread(self.handle_connection, (conn, addr))
                self.logging.log("server.py -> accept", "new client joined: " + str(addr[1]))
        except socket.error as err:
            print("accept new client failed with error %s" % str(err))
            self.logging.log("server.py -> accept", "accept new client failed", 2, str(err))

    def receive(self, socket_conn, memory_allocation_size):
        """Receive and unpickle one message from socket_conn; None on any failure.

        SECURITY: pickle.loads on bytes from the network executes arbitrary code if
        the peer is malicious — acceptable only on a trusted network.
        """
        try:
            serialized_data = socket_conn.recv(memory_allocation_size)
            deserialized = pickle.loads(serialized_data)
            self.logging.log("server.py -> receive", "received data: " + str(deserialized))
            return deserialized
        except socket.error as err:
            print("socket recv failed with error %s" % err)
            # BUG fixed: the log tag and message were swapped ("server.py -> ", "receive")
            self.logging.log("server.py -> receive", "socket recv failed", 2, str(err))
        except pickle.UnpicklingError as err:
            self.logging.log("server.py -> receive", "unpickling error!", 2, str(err))
        # reached only on the exception paths above
        self.logging.log(
            "server.py -> receive", "an exception occured and receive is returning empty data!!!", 3)
        return None

    def send(self, socket_conn, data):
        """Pickle and send data on socket_conn."""
        try:
            serialized = pickle.dumps(data)
            socket_conn.send(serialized)
            self.logging.log("server.py -> send", "data sent" + str(data))
        except socket.error as err:
            self.logging.log("server.py -> send", "could not send to socket", 2, str(err))

    def get_connected_clients_list(self):
        return self.clients

    def handle_connection(self, conn, addr):
        # THIS FUNCTION SHOULD BE OVERRIDEN BY CHILD CLASSES
        self.logging.log("server.py -> handle_connection", "client connected: " + str(addr[1]))
        self.clients.append((conn, addr))
self.pwm.start() self.thread = Thread(target = self.run) self.thread.start() def stop(self): self.pwm.stop() self.running = False self.thread.join() import math import random import time import curses from logging import Logging log = Logging("output.csv") screen = curses.initscr() curses.noecho() curses.cbreak() #curses.resizeterm(40,120) screen.nodelay(1) screen.keypad(True) #drawing area prompt = " Change Setpoint:\n\n Up/Down: 5.0\n Left/Right: 1.0 \n +/-: 0.1\n\n 'q' to quit.\n\n" pad_x = 67 pad_y = 20 pad = curses.newpad(pad_y,pad_x)
def __init__(self):
    # Logging reader bound to the master device address (MASTER_ADDR is module-level).
    self.LOGGING = Logging(MASTER_ADDR)
class Peer(Client, Server):
    """A P2P peer: simultaneously a Server (accepting peers) and a Client (dialing out)."""

    # status
    PEER = 0
    SEEDER = 1
    LEECHER = 2
    PORT = 5000  # used for local connection

    def __init__(self, max_upload_rate, max_download_rate):
        """Peer constructor: start listening and initialize rate/role state."""
        Server.__init__(self, '127.0.0.1', self.PORT) # inherites methods from Server class COULD BE WRONG TO SET LISTENING IP TO 0.0.0.0
        # BUG fixed: the listener thread was created but never started, so the
        # peer never accepted incoming connections.
        threading.Thread(target=self.listen, args=()).start()
        Client.__init__(self) # inherites methods from Client class
        self.status = self.PEER
        # NOTE(review): these flags shadow the chocked()/interested() methods below,
        # which therefore cannot be called on instances.
        self.chocked = False
        self.interested = False
        self.max_download_rate = max_download_rate
        self.max_upload_rate = max_upload_rate
        self.logging = Logging()
        self.swarm_clients = []
        self.myIp = "x.x.x.x"
        self.mySocketId = "00000"

    def start_download(self, torrent_name):
        """Fetch metainfo, register with the tracker and join the reported swarm."""
        torrent = self.get_metainfo('./metainfo/' + torrent_name)
        tracker = torrent['announce'].split(':')  # tracker info, 0 = ip, 1 = port
        swarm = self.connect_to_tracker(tracker[0], int(tracker[1]), torrent['info']['name'])
        self.connect_to_swarm(swarm)
        print("\n***** P2P client App *****")
        print("Peer Info: id: " + self.mySocketId + ", IP: " + self.myIp + ":" + str(self.PORT))
        print("Tracker/s info: IP: " + torrent['announce'])
        print("Max download rate: " + str(self.max_download_rate) + " b/s")
        print("Max upload rate: " + str(self.max_upload_rate) + " b/s")

    def connect_to_tracker(self, ip_address, port, resource_id):
        """Announce resource_id to the tracker; returns the swarm. Exits on a bad reply."""
        self.connect(ip_address, port)
        self.send({"resource_id": resource_id})
        initial_data = self.receive(1024)
        try:
            # set init data
            self.myIp = initial_data["my_ip"]
            self.mySocketId = initial_data["socket_id"]
            return initial_data["swarm"]
        except Exception as e:
            self.logging.log("peer.py -> connect_to_tracker", " data from tracker: " + str(initial_data), 3, str(e))
            exit()

    def send_message(self, block, start_index = -1, end_index = -1):
        # this function is invoked by one of the multithreaded functions, probably
        # from handle_single_peer_connection
        """ TODO: implement this method — build a message object from the message
        class, set its properties, split the block when start_index/end_index are
        non-negative, serialize it and send it inside a try-catch.
        :param block: a block object from the Block class
        :param start_index: the start index (if any) of the data being sent
        :param end_index: the end index of the data being sent
        :return: VOID """
        pass

    # ========== THESE 2 FUNCTIONS BELOW ARE HOW WE WILL HANDLE CONNECTIONS TO THE ENTIRE SWARM ====================
    def handle_connection(self, conn, addr):  # OVERRIDDEN FROM SERVER, (THREADED)
        # Entry point for peers that connect to us; already runs on its own thread,
        # so looping forever here does not block the main thread.
        self.logging.log("peer.py -> override handle_connection", "client connected: " + str(addr[0]))
        self.clients.append((conn, addr))
        # if requested we need to send our upload/download rate in a message
        while True:
            """ TODO: implement this method — receive and inspect the message; on the
            last block of a piece compare against the sha1 from the torrent file,
            mark the piece completed, save it to the downloads file and start
            sharing it with other peers.
            :return: VOID """
            # msg = self.receive(conn, self.max_download_rate)

    def handle_single_peer_connection(self, connected_client, _):
        while True:
            data = connected_client.receive(self.max_download_rate)
            # we check if the peer sends a message and is requesting some data
            connected_client.send(self.max_upload_rate)  # we send the data we have to the requester peer

    def connect_to_swarm(self, swarm):
        """Dial up to five peers from the swarm, skipping ourselves."""
        list_of_peers = swarm.getPeers()  # [0] - IP, [1] socket id
        connected_peers = 1
        for p in list_of_peers:
            if connected_peers <= 5 and str(p[1]) != str(self.mySocketId):  # prevent connecting to self (during testing)
                cl = Client()
                cl.connect(p[0], self.PORT + connected_peers)
                threading.Thread(target=self.handle_single_peer_connection, args=(cl, "")).start()
                self.swarm_clients.append(cl)
                connected_peers += 1
            else:
                print("Ignored Peer connection due to max 5 connections or connecting to self by accident")

    def upload_rate(self):
        # self.get_top_four_peers()....
        return 5  # sample data for now

    def download_rate(self):
        # self.get_top_four_peers()
        # calculate here
        return 5  # sample data for now

    def get_top_four_peers(self):
        self.top_four = []
        return self.top_four

    def verify_piece_downloaded(self, piece):
        return piece.is_completed() and not piece.is_corrupted()

    # ALL BELOW ARE DONE
    def change_role(self, new_role):
        if new_role == self.PEER:
            self.status = self.PEER
        elif new_role == self.SEEDER:
            self.status = self.SEEDER
        elif new_role == self.LEECHER:
            self.status = self.LEECHER
        else:
            self.logging.log("peer.py -> change_role", "incorrect role to set: " + str(new_role))

    def get_metainfo(self, torrent_path):
        """Parse the JSON torrent file; exits the process when it cannot be parsed."""
        try:
            # FIX: context manager closes the handle even when json parsing raises
            # (the original leaked it on the error paths).
            with open(torrent_path, 'r') as torrent:
                return json.loads(torrent.read())
        except FileNotFoundError as e:
            self.logging.log("peer.py -> get_metainfo", "could not find torrent file", 3, str(e))
            print("Could not find torrent file")
        except Exception as e:
            print("could not parse json: " + str(e))
            print("no torrent found. Terminating immediately......")
            exit()

    def is_chocked(self):
        return self.chocked

    def is_interested(self):
        return self.interested

    def chocked(self):
        # BUG(review): unreachable on instances — shadowed by the self.chocked attribute.
        self.chocked = True

    def unchocked(self):
        self.chocked = False

    def interested(self):
        # BUG(review): unreachable on instances — shadowed by the self.interested attribute.
        self.interested = True

    def not_interested(self):
        self.interested = False
def _create_logging() -> Logging:
    """Return a fresh Logging instance."""
    return Logging()
import argparse  # FIX: argparse is used below but was never imported
import json
import ec2lib
from logging import Logging

# Setup commandline arguments and check that the tags are matched
parser = argparse.ArgumentParser(description='ec2 instance handler based on groups as defined by tag name and value')
parser.add_argument('Action')
parser.add_argument('Tag', nargs='*', type=str)
args = parser.parse_args()

# Check the tags and store in dictionary Name:Value
# (FIX: removed the dead `TagDict = []` that was immediately overwritten)
TagDict = ec2lib.checktags(args.Tag)

# Setup logging
ec2log = Logging('ec2log2.txt')

# Open connection ----------------------------------------------------------------------
# Connect to AWS and check on all instances where the name:value pairs match
cmd = "aws ec2 describe-instances"
if len(args.Tag) != 0:
    cmd = cmd + " --filters"
    for key in TagDict:
        cmd = cmd + " \"Name=tag:" + key + ",Values=" + TagDict[key] + "\""
ec2log.logit("$ " + cmd, True)
p = ec2lib.RunCliCommand(cmd, False)

# Convert io.BufferedReader byte data to one json string
# (FIX: repeated string += in a loop is quadratic; join once instead)
myjson = "".join(line.decode(encoding='utf-8') for line in p.stdout)
class BlockBase:
    """Base driver for an i2c 'tiny block': typed read/write commands plus power management."""

    i2c = machine.I2C(0, scl=machine.Pin(22), sda=machine.Pin(21), freq=100000)

    def __init__(self, block_type: int, address: int):
        """Probe the block at the given i2c address and validate its reported type."""
        self.type_id = block_type.id
        self.address = address if address else block_type.id  # default block i2c address is equal to its block type
        self.logging = Logging(block_type.name)
        self.block_version = self._get_block_version()
        self.block_type_valid = False
        self.power_save_level = PowerSaveLevel.NoPowerSave
        self._tiny_write_base_id(_power_save_command, self.power_save_level.to_bytes(1, 'big'), True)  # wake up block functionality
        if not self.block_version:
            self.logging.warning("module with address 0x%x is not available", self.address)
        elif self.block_version[0] != self.type_id:
            self.logging.error(
                "unexpected block type. expected: %d, returned: %d", self.type_id, self.block_version[0])
        else:
            self.block_type_valid = True

    def _raw_tiny_write(self, type_id: int, command: int, data=None, silent=False):
        """Write [type_id, command, data...] to the block; log i2c failures unless silent."""
        try:
            payload = type_id.to_bytes(1, 'big') + command.to_bytes(1, 'big')
            if data:
                payload += data
            # self.logging.info(("write", payload))
            self.i2c.writeto(self.address, payload)
        except OSError:
            if not silent:
                self.logging.error(
                    "tiny-block with address 0x%02X is unavailable for writing", self.address)

    def _check_type(self, type_id):
        """Return True when a command with this type id may be issued to the block.

        BUG fixed: the original condition included `(_i2c_block_type_id_base or
        self.block_type_valid)`, which is always true whenever the base id is a
        non-zero constant, so the validity flag was never actually consulted.
        Base-type commands are always allowed (they are needed in __init__ before
        validation); typed commands additionally require a validated block.
        """
        if type_id == _i2c_block_type_id_base:
            return True
        return type_id == self.type_id and self.block_type_valid

    def __tiny_write_common(self, type_id: int, command: int, data=None, silent=False):
        """
        writes data to tiny_block via I2C
        @param type_id: block type id
        @param command: one byte command
        @param data: specify input data for entered command
        """
        if self._check_type(type_id):
            self._raw_tiny_write(type_id, command, data, silent)
        else:
            self.logging.error("invalid block type - writing interupted")

    def _tiny_write_base_id(self, command: int, data=None, silent=False):
        self.__tiny_write_common(_i2c_block_type_id_base, command, data, silent)

    def _tiny_write(self, command: int, data=None, silent=False):
        if not self.is_available():
            return
        self.__tiny_write_common(self.type_id, command, data, silent)

    def __tiny_read_common(self, type_id: int, command: int, in_data: bytes = None, expected_length: int = 0, silent=False):
        """
        reads data form tiny_block via I2C
        @param type_id: block type id
        @param command: one byte command
        @param in_data: specify input data for entered command
        @param expected_length: if defined will be read entered number of bytes. If None is expected length as a first byte
        @return provided bytes. If size is first byte is not included to output data
        """
        if self._check_type(type_id):
            self._raw_tiny_write(type_id, command, in_data, silent)
            try:
                data = self.i2c.readfrom(self.address, expected_length, True)
                return data
            except OSError:
                if not silent:
                    self.logging.error(
                        "tiny-block with address 0x%02X is unavailable for reading", self.address)
                return None
        else:
            self.logging.error("invalid block type - reading interupted")
            return None

    def _tiny_read_base_id(self, command: int, in_data: bytes = None, expected_length: int = 0, silent=False):
        return self.__tiny_read_common(_i2c_block_type_id_base, command, in_data, expected_length, silent)

    def _tiny_read(self, command: int, in_data: bytes = None, expected_length: int = 0):
        if not self.is_available():
            return None
        return self.__tiny_read_common(self.type_id, command, in_data, expected_length)

    def change_block_address(self, new_address):
        self._tiny_write_base_id(_change_i2c_address_command, new_address.to_bytes(1, 'big'))
        self.address = new_address
        time.sleep(0.1)  # wait to the change is performed and stopped

    def _get_block_version(self):
        """ returns block_type, pcb version, adjustment_version """
        data = self._tiny_read_base_id(_get_module_version_command, None, 3, silent=True)
        if not data:
            return None
        return (data[0], data[1], data[2])

    def is_available(self):
        return self.block_type_valid  # available and valid block version

    def power_save(self, level: PowerSaveLevel) -> None:
        self.power_save_level = level
        self._tiny_write_base_id(_power_save_command, level.to_bytes(1, 'big'))
class ActiveVariable():
    # Observable value holder: registered listeners fire when set() detects that the
    # value newly satisfies their condition (edge-triggered via the previous value).
    logging = Logging("act_var")

    def __init__(self, initial_value=None, renew_period=0, renew_func=None):
        """
        @param initial_value: if is set Active variable will be preset to this value
        @param renew_period: renew_func will be called with this period if this value > 0
        @param renew_func: this method will be called periodically if renew_period is > 0 or if get is called with the parameter "force"
        """
        self._old_value = initial_value
        self._value = initial_value
        self._renew_period = renew_period
        self._renew_func = renew_func
        self._renew_handle = None   # Planner task handle while periodic renewal is active
        self._listeners = list()    # listener tuples, layout depends on condition type (see set())
        self._handle_count = 0      # monotonically increasing id used as listener handle

    def change_period(self, new_period):
        # Change the renewal period; an already-running renew task is killed and,
        # NOTE(review): restarted only in that case — a period set while no task is
        # active takes effect when the first listener is added (see _add_listener).
        self._renew_period = new_period
        if self._renew_handle:
            Planner.kill_task(self._renew_handle)
            self._renew_handle = None
            if self._renew_period > 0:
                self._renew_handle = Planner.repeat(self._renew_period, self._update_value)

    def set(self, value):
        # Store a new value and fire every listener whose condition newly became true.
        # Listener tuple layouts:
        #   simple conditions: (handle, type, repeat, expected, func, args, kwargs)
        #   range conditions:  (handle, type, repeat, min, max, func, args, kwargs)
        #   changed/updated:   (handle, type, repeat, func, args, kwargs)
        if value is None:
            return #nothing to compare
        self._old_value = self._value
        self._value = value
        # NOTE(review): one-shot listeners are removed from the list while it is being
        # iterated; with several one-shot listeners a sibling can be skipped — confirm.
        for listener in self._listeners:
            processed = False
            _type = listener[1]
            repeat = listener[2]
            if _type == Conditions.equal_to:
                # floats are compared with math.isclose instead of ==
                if isinstance(value, float) or isinstance(listener[3], float):
                    if (self._old_value is None or not math.isclose(self._old_value, listener[3])) and math.isclose(value, listener[3]):
                        listener[4](*listener[5], **listener[6])
                        processed = True
                else:
                    if (self._old_value is None or self._old_value != listener[3]) and value == listener[3]:
                        listener[4](*listener[5], **listener[6])
                        processed = True
            elif _type == Conditions.not_equal_to:
                if isinstance(value, float) or isinstance(listener[3], float):
                    if (self._old_value is None or math.isclose(self._old_value, listener[3])) and not math.isclose(value, listener[3]):
                        listener[4](*listener[5], **listener[6])
                        processed = True
                else:
                    if (self._old_value is None or self._old_value == listener[3]) and value != listener[3]:
                        listener[4](*listener[5], **listener[6])
                        processed = True
            elif _type == Conditions.less_than:
                if (self._old_value is None or self._old_value >= listener[3]) and value < listener[3]:
                    listener[4](*listener[5], **listener[6])
                    processed = True
            elif _type == Conditions.more_than:
                if (self._old_value is None or self._old_value <= listener[3]) and value > listener[3]:
                    listener[4](*listener[5], **listener[6])
                    processed = True
            elif _type == Conditions.in_range:
                # fires when the value newly enters [min, max)
                if (self._old_value is None or self._old_value < listener[3] or self._old_value >= listener[4]) and value >= listener[3] and value < listener[4]:
                    listener[5](*listener[6], **listener[7])
                    processed = True
            elif _type == Conditions.out_of_range:
                # fires when the value newly leaves [min, max)
                if (self._old_value is None or self._old_value >= listener[3] and self._old_value < listener[4]) and (value < listener[3] or value >= listener[4]):
                    listener[5](*listener[6], **listener[7])
                    processed = True
            elif _type == Conditions.value_changed:
                if value != self._old_value:
                    listener[3](*listener[4], **listener[5])
                    processed = True
            elif _type == Conditions.value_updated:
                # fires on every set(), regardless of the value
                listener[3](*listener[4], **listener[5])
                processed = True
            else:
                self.logging.error("unknown listener type %s" % str(listener[1]))
            if processed and not repeat:
                self._listeners.remove(listener)  # one-shot listener consumed

    def get(self, force=False):
        # Refresh on demand when no periodic task maintains the value, or when forced.
        if not self._renew_handle or force:
            self._update_value()
        return self._value

    def get_previous_value(self):
        return self._old_value

    def _update_value(self):
        if self._renew_func:
            self.set(self._renew_func())

    def _add_listener(self, listener):
        # Lazily start the periodic renew task when the first listener appears.
        if not self._listeners and self._renew_period > 0:
            self._renew_handle = Planner.repeat(self._renew_period, self._update_value)
        self._listeners.append(listener)
        self._handle_count += 1
        return listener[0]  # the handle usable with remove_trigger()

    def remove_trigger(self, handle):
        # Remove the listener registered under handle; stop renewing when none remain.
        for listener in self._listeners:
            if listener[0] == handle:
                self._listeners.remove(listener)
                if not self._listeners:
                    Planner.kill_task(self._renew_handle)
                    self._renew_handle = None
                return True
        return False

    def equal_to(self, expected, repetitive, function, *args, **kwargs):
        """ provided function with arguments will be called when measured value is newly equal to expected value """
        return self._add_listener((self._handle_count, Conditions.equal_to, repetitive, expected, function, args, kwargs))

    def not_equal_to(self, expected, repetitive, function, *args, **kwargs):
        """ provided function with arguments will be called when measured value is newly not equal to expected value """
        return self._add_listener((self._handle_count, Conditions.not_equal_to, repetitive, expected, function, args, kwargs))

    def less_than(self, expected, repetitive, function, *args, **kwargs):
        """ provided function with arguments will be called when measured value is newly smaller than expected value """
        return self._add_listener((self._handle_count, Conditions.less_than, repetitive, expected, function, args, kwargs))

    def more_than(self, expected, repetitive, function, *args, **kwargs):
        """ provided function with arguments will be called when measured value is newly bigger than expected value """
        return self._add_listener((self._handle_count, Conditions.more_than, repetitive, expected, function, args, kwargs))

    def in_range(self, expected_min, expected_max, repetitive, function, *args, **kwargs):
        """ provided function with arguments will be called when measured value is newly bigger or equal to expected_min value and smaller that expected_max value """
        return self._add_listener((self._handle_count, Conditions.in_range, repetitive, expected_min, expected_max, function, args, kwargs))

    def out_of_range(self, expected_min, expected_max, repetitive, function, *args, **kwargs):
        """ provided function with arguments will be called when measured value is newly smaller than expected_min value or bigger or equal to expected_max value """
        return self._add_listener((self._handle_count, Conditions.out_of_range, repetitive, expected_min, expected_max, function, args, kwargs))

    def changed(self, repetitive, function, *args, **kwargs):
        """ provided function with arguments will be called when measured value is different that last time measured value """
        return self._add_listener((self._handle_count, Conditions.value_changed, repetitive, function, args, kwargs))

    def updated(self, repetitive, function, *args, **kwargs):
        """ provided function with arguments will be called always when a value is measured """
        return self._add_listener((self._handle_count, Conditions.value_updated, repetitive, function, args, kwargs))
class Shell():
    """Command shell for the device: file transfer, checksums and event-program control."""

    _b_false = b"\0"
    _b_true = b"\1"
    events_file_name = "events.mpy"
    import_error_file_name = ".import_error"

    def __init__(self) -> None:
        self.file_path = None    # path of the file currently being handled
        self.new_file = False    # True when the next append should create/truncate
        self.dir_content = None  # cached directory listing for _cmd_get_next_file_info
        self.dir_pos = 0         # cursor into dir_content
        self.logging = Logging("Shell")

    def file_exists(self, path):
        """Return True when path can be opened for reading."""
        try:
            file = open(path, "r")
            file.close()
            return True
        except OSError:  # open failed
            return False

    def remove_file(self, file_path):
        os.remove(file_path)

    def rename_file(self, orig_file_path, dest_file_path):
        os.rename(orig_file_path, dest_file_path)

    def _reboot(self):
        MainBlock.reboot()

    def _import_events(self):
        """Import the events module (which plans its tasks); returns True on success."""
        try:
            import events  # events will planned
            self.logging.info("events loaded successfully")
            return True
        except Exception as error:
            self.logging.exception(
                error, extra_message="events.py was not imported properly")
            import sys
            sys.print_exception(error, sys.stdout)
            return False

    def load_events(self):
        """Load events.mpy if present; otherwise un-hide a previously disabled copy."""
        if self.file_exists(self.events_file_name):
            self._import_events()
        else:
            hidden_file_name = "." + self.events_file_name
            # if events.py has been hidden to do not be loaded, return them visible
            # to be changed or used next time
            if self.file_exists(hidden_file_name):
                self.rename_file(hidden_file_name, self.events_file_name)

    @staticmethod
    def read_chunks(file, chunk_size):
        # FIX: was defined without self/@staticmethod, so calling it on an instance
        # would have bound the file argument incorrectly. (Currently unused helper.)
        while True:
            data = file.read(chunk_size)
            if not data:
                break
            yield data

    def _get_file_checksum(self, file_path):
        """Return the sha1 digest of the file at file_path, read in 1000-byte chunks."""
        # BUG fixed: hashlib.sha1("") passed a str, which raises TypeError on CPython;
        # the no-argument constructor is valid on both CPython and MicroPython.
        sha1 = hashlib.sha1()
        with open(file_path, "rb") as file:
            while True:
                chunk = file.read(1000)
                if not chunk:
                    break
                sha1.update(chunk)
        return sha1.digest()

    def command_request(self, data):
        """Dispatch one command byte (data[0]) with its payload (data[1:]).

        Returns _b_true/_b_false, command-specific bytes, or None for unknown commands.
        """
        if data and len(data) > 0:
            command = data[0]
            if command == _cmd_version:
                return self._b_true
            elif command == _cmd_stop_program:
                print("cmd_stop_program")
                if self.file_exists(self.events_file_name):
                    print("events_file will be renamed")
                    # hide the events file so it is not loaded after reboot
                    self.rename_file(self.events_file_name, "." + self.events_file_name)
                    print("events_file renamed")
                print("reboot planned")
                Planner.postpone(0.1, self._reboot)
                return self._b_true
            elif command == _cmd_start_program:
                return self._b_true if self._import_events() else self._b_false
            elif command == _cmd_get_next_file_info:
                if not self.dir_content:
                    self.dir_content = os.listdir("/")
                if self.dir_pos >= len(self.dir_content):
                    return self._b_false
                name = self.dir_content[self.dir_pos]
                self.dir_pos += 1
                return self._get_file_checksum(name) + name.encode("utf-8")
            elif command == _cmd_remove_file:
                filename = data[1:]
                self.remove_file(filename)
                return self._b_true
            elif command == _cmd_handle_file:
                self.handeled_file_path = data[1:]
                self.new_file = True
                return self._b_true
            elif command == _cmd_get_file_checksum:
                return self._get_file_checksum(self.handeled_file_path)
            elif command == _cmd_append:
                data = data[1:]
                # first append after _cmd_handle_file truncates; later ones append
                if self.new_file:
                    file = open(self.handeled_file_path, "wb")
                    self.new_file = False
                else:
                    file = open(self.handeled_file_path, "ab")
                file.write(data)
                file.close()
                return self._b_true
            else:
                return None