def load_config(self):
    """Build the ServerConfig from every public attribute of local_config.

    Collects all non-underscore names from the local_config module, passes
    them as keyword arguments (plus the app's debug flag) to ServerConfig,
    and logs the sanitized result.
    """
    kwargs = {}
    for attr in dir(local_config):
        if attr.startswith("_"):
            continue
        kwargs[attr] = getattr(local_config, attr)
    self.server_config = ServerConfig(debug=self.debug, **kwargs)
    self.logger.info(self.server_config.sanitized_dict())
def update_database_to_latest_version():
    """Bring the catalog schema up to the target version.

    Reads the current and target schema versions, verifies that an update
    SQL file exists for every intermediate version, then applies each file
    in order, bumping the recorded schema version after each success.
    """
    cfg = ServerConfig()
    current_schema_version = get_current_schema_version(cfg)
    target_schema_version = get_target_schema_version()

    # Nothing to do, or the database is newer than this code understands.
    if current_schema_version > target_schema_version:
        print("Catalog Schema Version is from the future (current=%d > target=%d)." % (
            current_schema_version, target_schema_version))
        return
    if current_schema_version == target_schema_version:
        print("Catalog Schema Version is already up to date (version=%d)." % target_schema_version)
        return

    # Map each candidate update file to the schema version encoded in its
    # filename (e.g. ".../5.sql" -> 5).
    foundfiles = get_update_files(
        current_schema_version, cfg.values['catalog_database_type'])
    if DEBUG:
        print("files: %s" % foundfiles)
    updatefiles = {int(f.split("/")[-1].split(".")[0]): f for f in foundfiles}
    if DEBUG:
        print("updatefiles: %s" % updatefiles)

    pending = range(current_schema_version + 1, target_schema_version + 1)

    # Refuse to start unless every intermediate update file is present.
    for version in pending:
        if DEBUG:
            print("checking... %d" % version)
        if version not in updatefiles:
            print(
                "ERROR: SQL Update File Not Found for Schema Version %d" % version)
            return

    # Apply each update in order, recording the new version after each one.
    for version in pending:
        print("Updating to Catalog Schema... %d" % version)
        if DEBUG:
            print("running: %s" % updatefiles[version])
        result = cfg.exec_sql_file(updatefiles[version])
        # Any stderr output from the SQL run is treated as failure.
        if result[2].decode('utf-8') != "":
            print("ERROR: SQL did not complete...")
            print(result[2])
            return
        if DEBUG:
            print("sql result...")
            print(result)
        update_schema_version(cfg, version)

    print("Done.")
def __init__(self, server_config, charset='utf-8', debug_mode=False):
    """Initialize an external server from a ServerConfig instance.

    Accepts either a ready ServerConfig or a ServerSignature; a signature
    is wrapped into a new ServerConfig using *charset* and *debug_mode*.
    """
    config = server_config
    if isinstance(config, ServerSignature):
        config = ServerConfig(config, charset, debug_mode)
    self.server_config = config
def update_database_to_latest_version():
    """Run every pending schema-update SQL file to reach the target version.

    Checks current vs. target schema versions, ensures an update file exists
    for each step, then executes each file in order and records the new
    schema version after each successful run.
    """
    cfg = ServerConfig()
    current_version = get_current_schema_version(cfg)
    target_version = get_target_schema_version()

    # Bail out early when there is nothing to apply.
    if current_version > target_version:
        print_error('Catalog Schema Version is from the future (current=%d > target=%d).' % (
            current_version, target_version))
        return
    if current_version == target_version:
        print('Catalog Schema Version is already up to date (version=%d).' % target_version)
        return

    # Collect candidate update files keyed by the version in their filename.
    candidates = get_update_files(current_version, cfg.get('catalog_database_type'))
    print_debug('files: %s' % candidates)
    updatefiles = {}
    for path in candidates:
        updatefiles[int(path.split('/')[-1].split('.')[0])] = path
    print_debug('updatefiles: %s' % updatefiles)

    versions = list(range(current_version + 1, target_version + 1))

    # Verify every required step is available before touching the database.
    for version in versions:
        print_debug('checking... %d' % version)
        if version not in updatefiles:
            print_error('ERROR: SQL Update File Not Found for Schema Version %d' % version)
            return

    # Apply the updates one version at a time.
    for version in versions:
        print('Updating to Catalog Schema... %d' % version)
        print_debug('running: %s' % updatefiles[version])
        result = cfg.exec_sql_file(updatefiles[version])
        # Non-empty stderr from the SQL run means the update failed.
        if result[2].decode('utf-8') != '':
            print_error('ERROR: SQL did not complete...')
            print_error(result[2])
            return
        print_debug('sql result...')
        print_debug(result)
        update_schema_version(cfg, version)

    print('Done.')
def __init__(self, config_name="config.json"):
    """Wire up the server: config, logging, worker pool, cache and handler.

    :param config_name: name of the JSON configuration file to load
    """
    self.__config = ServerConfig(config_name)
    # Log everything (DEBUG level) to the file named in the config.
    logging.basicConfig(
        filename=self.__config.log_file,
        level=logging.DEBUG,
        format='%(asctime)s %(message)s',
    )
    self.thread_pool = ThreadPool()
    # Cache directory is resolved relative to the current working directory.
    cache_root = Path.cwd() / self.__config.cache_dir
    self.cache = CacheStorage(cache_root)
    self.request_handler = RequestHandler(self.cache)
def test_rebalance_for_object_count(self):
    """Verify that 'iadmin modresc ... rebalance' restores a correct resource
    object count after catalog rows are deleted directly in the database."""
    # =-=-=-=-=-=-=-
    # read server_config.json and .odbc.ini
    cfg = ServerConfig()

    # build a directory of 30 small (1MB) test files to put
    root_dir = "/tmp/irods/big_dir"
    if os.path.exists(root_dir):
        shutil.rmtree(root_dir)
    os.makedirs(root_dir)
    for i in range(30):
        path = root_dir + "/rebalance_testfile_" + str(i)
        output = commands.getstatusoutput('dd if=/dev/zero of=' + path + ' bs=1M count=1')
        print output[1]
        assert output[0] == 0, "dd did not successfully exit"

    # get initial object count
    initial_output = getiCmdOutput(s.adminsession, "iadmin lr demoResc")
    objcount_line = initial_output[0].splitlines()[-1]
    initial_objcount = int(objcount_line.split(":")[-1].strip())
    print "initial: " + str(initial_objcount)

    # put the new files
    assertiCmd(s.adminsession, "iput -r " + root_dir)

    # =-=-=-=-=-=-=-
    # drop several rows from the R_DATA_MAIN table directly (bypassing iRODS)
    # so the catalog object count no longer matches reality
    cfg.exec_sql_cmd("delete from R_DATA_MAIN where data_name like 'rebalance_testfile_1%'")

    # rebalance
    assertiCmd(s.adminsession, "iadmin modresc demoResc rebalance")

    # expected object count:
    # 19 = 30 files put - 11 deleted (testfile_1 and testfile_10 through _19)
    expected_objcount = initial_objcount + 19
    print "expected: " + str(expected_objcount)
    assertiCmd(s.adminsession, "iadmin lr demoResc", "LIST", "resc_objcount: " + str(expected_objcount))
def __init__(self, signature, start_server=None, timeout_limit = 2, debug_mode = False):
    """Server initialization. Requires the server's signature (a
    ServerSignature instance). Optionally an external-server instance may be
    given to join the network it belongs to; when no server is given, a new
    network is started. The optional timeout_limit (default 2) is how many
    seconds to wait for a request before deciding that the server it was
    sent to is down."""
    self.server_config = ServerConfig(signature, debug_mode = debug_mode)
    self.timeout_limit = timeout_limit
    self._server_list = set()  # known external servers
    # The receive loop runs on a dedicated thread, delivering each incoming
    # message to the _msg_recv callback.
    thr = self.server_config.server_thread(self._msg_recv)
    thr.start()
    self.server_thread = thr
    self.token_thread_list = []
    self.expecting = False  # which reply type is currently awaited, or False
    if start_server:
        # Joining an existing network: ask the bootstrap server for its peers.
        self.expecting = 'serverList'
        start_server.get_server_list(self.signature)
class Response:
    """HTTP response: status code, headers and (for error codes) a body page."""

    __config = ServerConfig()
    __error_page = ErrorPage()

    def __init__(self, code, def_headers=None):
        """
        HTTP response data class

        :type def_headers: dict
        :type code: int
        """
        self.code = code
        # Fall back to a copy of the configured defaults when no headers given.
        self.headers = (def_headers if def_headers is not None
                        else self.__config.default_headers.copy())
        # 4xx/5xx responses carry a generated error page; others start empty.
        self.body = self.__error_page.get_page_content(code) if self.__is_error(code) else ""
        if self.body:
            # Content-Length counts encoded bytes, not characters.
            self.headers["Content-Length"] = len(bytes(self.body, "utf-8"))

    def add_header(self, name, value):
        """Adds a header for response

        :type value: str
        :type name: str
        """
        self.headers[name] = value

    def get_headers_string(self):
        """Return headers as a string"""
        return "".join(f"{key}: {val}\r\n" for key, val in self.headers.items())

    def get_bytes(self):
        """Returns response as byte array"""
        status_line = f"HTTP/1.1 {self.code}\r\n"
        payload = status_line + self.get_headers_string() + "\r\n" + self.body
        return bytearray(payload, "utf-8")

    @staticmethod
    def __is_error(code):
        # Only the 4xx and 5xx classes get an error page body.
        return int(code / 100) in (4, 5)
def _msg_recv(self, bts_msg, orig_tup):
    """Internal message-receive callback, run on the dedicated listener
    thread. Receives the message byte string and the (IP, port) tuple of the
    sender. Returning a falsy value stops the receive thread.

    Handles messages:
    - get_server_list
    - hello
    - halt
    - confirm_token (when a confirmation is awaited)
    - server_list (when the list is awaited)
    - token (when no earlier option consumed the message and its evaluation
      yields a Token, the token is processed)."""
    str_msg = self.server_config.decode(bts_msg)
    sig = ServerSignature(*orig_tup, True)
    conf = ServerConfig(sig)
    if str_msg == 'get_server_list':
        return self._recv_server_list_request(conf)
    if str_msg == 'hello':
        return self._recv_hello(conf)
    if str_msg == 'halt':
        # Falsy return value shuts the listener thread down.
        return False
    if str_msg == 'confirm_token':
        if self.expecting == 'confirm_token':
            return self._recv_confirm_token()
    if self.expecting == 'serverList':
        try:
            result = self._recv_server_list(str_msg)
            self.expecting = False
            return result
        except:
            # Not a parsable server list; fall through to token handling.
            pass
    try:
        # SECURITY NOTE(review): eval() on data received from the network is
        # dangerous — any peer can execute arbitrary code here.
        token = eval(str_msg)
        if isinstance(token, Token):
            return self._recv_token(conf, token)
        # NOTE(review): a successful eval of a non-Token falls through and the
        # function returns None (falsy), which stops the receive thread —
        # confirm this is intended.
    except:
        return True
class ThreadPool:
    """Fixed-size pool of Worker threads consuming tasks from a shared queue."""

    __config = ServerConfig()

    def __init__(self):
        """
        Inits a pool of worker threads which serves the clients requests
        """
        count = self.__config.threads_count
        self.tasks = Queue(count)
        self.workers = [Worker(self.tasks, f"worker {i} ") for i in range(count)]

    def add_task(self, func, *args, **kwargs):
        """Add a task to the queue"""
        self.tasks.put((func, args, kwargs))

    def terminate_all_workers(self):
        """Terminate all workers"""
        # One poison pill (None) per worker unblocks and stops each of them.
        for _ in self.workers:
            self.tasks.put(None)
class FileCache:
    """LRU cache of read-only file descriptors keyed by file path."""

    __config = ServerConfig()

    def __init__(self):
        # Maximum number of descriptors kept open by the LRU cache.
        self.__cache_max_size = self.__config.file_cache_size
        # When true, open failures are cached too (as None) instead of raised.
        self.__cache_errors = self.__config.file_cache_errors
        self.__read_file_desc = self.__prepare_read_file_desc()

    def get_fd(self, path):
        """
        Get descriptor of specified file
        :param path: path to file
        :return: None or file descriptor
        """
        if self.__cache_errors:
            # Failures were already converted to a cached None inside the wrapper.
            return self.__read_file_desc(path)
        else:
            try:
                return self.__read_file_desc(path)
            except IOError:
                return None

    def __prepare_read_file_desc(self):
        """
        Wraps a descriptor read function with an LRU cache
        :return: read descriptor func decorated with LRU cache
        """
        # NOTE(review): descriptors evicted from the LRU cache are never
        # os.close()d, so long-running processes may leak fds — confirm.
        @lru_cache(maxsize=self.__cache_max_size)
        def result(filename):
            if self.__cache_errors:
                # Swallow the failure so the None result is cached as well.
                descriptor = None
                try:
                    descriptor = os.open(filename, os.O_RDONLY)
                except IOError:
                    pass
            else:
                descriptor = os.open(filename, os.O_RDONLY)
            return descriptor
        return result
def main(argv):
    """Configure and launch the android emulator cmake builds.

    Runs a release ('prod') build, and additionally a debug build unless
    gfxstream or crosvm was requested.

    :param argv: unused; arguments are read from sys.argv by argparse.
    """
    config_logging()

    # We don't want to be too aggressive with concurrency.
    test_cpu_count = int(multiprocessing.cpu_count() / 4)

    # The build bots tend to be overloaded, so we want to restrict
    # cpu usage to prevent strange timeout issues we have seen in the past.
    # We can increment this once we are building on our own controlled macs
    if platform.system() == 'Darwin':
        test_cpu_count = 2

    parser = argparse.ArgumentParser(
        description="Configures the android emulator cmake project so it can be built"
    )
    parser.add_argument(
        "--out_dir", type=str, required=True, help="The output directory")
    parser.add_argument(
        "--dist_dir", type=str, required=True, help="The destination directory")
    parser.add_argument(
        "--build-id",
        type=str,
        # Fix: the default used to be the mutable (and wrongly typed) [].
        # The option is required, so the default is never used; None is the
        # honest "no default" for a str-typed argument.
        default=None,
        required=True,
        dest="build_id",
        help="The emulator build number")
    parser.add_argument(
        "--test_jobs",
        type=int,
        default=test_cpu_count,
        dest="test_jobs",
        help="Specifies the number of tests to run simultaneously")
    parser.add_argument(
        "--target",
        type=str,
        default=platform.system(),
        help="The build target, defaults to current os")
    parser.add_argument(
        "--qtwebengine",
        action='store_true',
        help="Build emulator with QtWebEngine libraries")
    parser.add_argument(
        "--gfxstream", action='store_true', help="Build gfxstream libraries")
    parser.add_argument(
        "--crosvm", action='store_true', help="Build crosvm")
    args = parser.parse_args()

    version = "{0[0]}.{0[1]}.{0[2]}".format(sys.version_info)
    logging.info("Building with %s on %s - %s, Python: %s", PYTHON_EXE,
                 platform.system(), platform.uname(), version)

    target = platform.system().lower()
    if args.target:
        target = args.target.lower()

    if not os.path.isabs(args.out_dir):
        args.out_dir = os.path.join(AOSP_ROOT, args.out_dir)

    # Make sure we have all the build dependencies
    install_deps()

    # This is how we are going to launch the python build script
    launcher = [
        PYTHON_EXE,
        os.path.join(AOSP_ROOT, "external", "qemu", "android", "build",
                     "python", "cmake.py")
    ]

    gfxstream_arg = "--gfxstream"
    crosvm_arg = "--crosvm"

    # Standard arguments for both debug & production.
    if args.qtwebengine:
        qtwebengine_arg = "--qtwebengine"
    else:
        qtwebengine_arg = "--noqtwebengine"
    cmd = [
        qtwebengine_arg, "--noshowprefixforinfo", "--out", args.out_dir,
        "--sdk_build_number", args.build_id, "--target", target, "--dist",
        args.dist_dir, "--test_jobs", str(args.test_jobs)
    ]
    prod = ["--crash", "prod"]
    debug = ["--config", "debug"]
    if args.gfxstream:
        cmd.append(gfxstream_arg)
    if args.crosvm:
        cmd.append(crosvm_arg)

    # Kick off builds for 2 targets. (debug/release)
    with ServerConfig(is_presubmit(args.build_id)) as cfg:
        run(launcher + cmd + prod, cfg.get_env(), 'rel')
        if not args.gfxstream and not args.crosvm:
            run(launcher + cmd + debug, cfg.get_env(), 'dbg')

    logging.info("Build completed!")
system_use_windows = 1 # flask init 配置web文件目录 app = Flask(__name__, static_folder="../web/static", template_folder='../web/templates') # app.config['SECRET_KEY'] = 'secret!' app.config['SECRET_KEY'] = os.urandom( 24) # 设置为24位的字符,每次运行服务器都是不同的,所以服务器启动一次上次的session就清除。 app.config['PERMANENT_SESSION_LIFETIME'] = timedelta(days=7) # 设置session的保存时间。 app.config['ssl_context'] = 'adhoc' socketio = SocketIO(app) M_WEBSOCKET_RESPONSE = 'response' # config information server_config = ServerConfig() # database mysql_server_ip = server_config.get_mysql_server_ip() mysql_server_port = server_config.get_mysql_server_port() mysql_server_account = server_config.get_mysql_server_account() mysql_server_passwd = server_config.get_mysql_server_passwd() mysql_server_db = server_config.get_mysql_server_db() sql_util = sqlHelper(mysql_server_ip, mysql_server_port, mysql_server_account, mysql_server_passwd, mysql_server_db) sql_util.connect_mysql() # k8s helper k8s_config_file = server_config.get_k8s_config_file() k8sHelper = K8sHelper(k8s_config_file)
class ProxyPasser:
    """Forwards a single HTTP request from a client socket to its target host
    and relays the (re-framed) response back to the client."""

    __config = ServerConfig()
    __req_parser = RequestParser()
    __res_parser = ResponseParser()

    def __init__(self, client_socket):
        self.__client = client_socket
        self.__target = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

    def run(self):
        """
        Init connection client <> target
        :return:
        """
        logging.info(f"Client <> Target opened")
        raw_req = self.__client.recv(8192)
        # NOTE(review): recv() returns b'' (not None) when the peer closes,
        # so this None check likely never fires — confirm intended behavior.
        if raw_req is None:
            logging.info(f"Client terminated (RECV)")
            self.__client.close()
            return
        _, req = self.__req_parser.parse(raw_req)
        host, port = self.__req_parser.get_destination(req)

        # Complete the request body using whichever framing the request
        # declares (Content-Length or chunked Transfer-Encoding).
        full_body = None
        content_len = req.headers.get('Content-Length')
        if content_len:
            full_body = self._read_http_message_content_length(self.__client, req.body, int(content_len))
        transfer_enc = req.headers.get('Transfer-Encoding')
        if transfer_enc == 'chunked':
            full_body = self._read_http_message_chunked_encoding(self.__client, req.body)
        if req.method not in ['POST','PUT', 'PATCH']:
            # Bodyless methods: forward the headers only.
            raw_req = req.headers_raw
        else:
            if not full_body:
                # Body-bearing method without a body: drop the connection.
                self.__client.close()
                return
            raw_req = req.headers_raw + full_body
        target_host_socket = self.__target
        target_host_socket.connect((host, port))
        target_host_socket.sendall(raw_req)

        raw_rsp = target_host_socket.recv(8192)
        # NOTE(review): same recv()-returns-b'' caveat as above.
        if raw_rsp is None:
            logging.info(f"Server terminated (RECV)")
            self.__close_conn(target_host_socket)
            return
        orig_headers, mod_headers, body = self.__res_parser.parse(raw_rsp)

        # Same body-completion logic for the response direction.
        full_body = None
        content_len = orig_headers.get('Content-Length')
        if content_len:
            full_body = self._read_http_message_content_length(target_host_socket, body, int(content_len))
        transfer_enc = orig_headers.get('Transfer-Encoding')
        if transfer_enc == 'chunked':
            full_body = self._read_http_message_chunked_encoding(target_host_socket, body)
        # if not full_body:
        #     return
        # NOTE(review): when the response has neither Content-Length nor
        # chunked encoding, full_body stays None and the concatenation below
        # raises — confirm upstream guarantees one of the two framings.
        final_response = mod_headers + full_body
        self.__client.sendall(final_response)
        self.__close_conn(target_host_socket)

    def _read_http_message_content_length(self, client, body_bytes, total_len):
        """Read from *client* until *total_len* bytes of message body (starting
        from the already-received *body_bytes*) have been collected, or the
        peer stops sending."""
        result = body_bytes
        curr_len = len(body_bytes)
        while curr_len < total_len:
            chunk = client.recv(8192)
            if not chunk:
                break
            result += chunk
            curr_len += len(chunk)
        return result

    def _read_http_message_chunked_encoding(self, client, body_bytes):
        """Read a chunked-encoded message body from the socket, returning the
        raw bytes (chunk-size lines included) up to and including the final
        zero-length chunk."""
        result = bytearray()
        avail_read = 0  # bytes of the current chunk still expected
        while True:
            if len(body_bytes) < 2:
                chunk = client.recv(8192)
                if not chunk:
                    break
                body_bytes += chunk
            if avail_read > 0:
                # Still inside a chunk: consume whatever part is buffered.
                fragment = body_bytes[:avail_read]
                result += fragment
                body_bytes = body_bytes[avail_read:]
                avail_read -= len(fragment)
                if avail_read < 0:
                    avail_read = 0
            else:
                # At a chunk boundary: the next (non-empty) line is the hex
                # chunk size, optionally followed by ';extensions'.
                body_decoded = body_bytes.decode('utf-8', errors='ignore').splitlines()
                chunk_line = body_decoded[0]
                if chunk_line == '':
                    chunk_line = body_decoded[1]
                length = int(chunk_line.strip().split(';')[0], 16)
                chunk_line_len = len(chunk_line)
                if length == 0:
                    # Terminal zero chunk: include the trailer and finish.
                    return result + body_bytes
                result += body_bytes[:length+chunk_line_len]
                # Any part of the chunk not yet buffered remains to be read.
                avail_read = length + chunk_line_len - len(body_bytes[chunk_line_len:length+chunk_line_len])
                body_bytes = body_bytes[length+chunk_line_len:]
        return result

    def __close_conn(self, target_host_socket):
        """
        Close target and client connections
        :param target_host_socket:
        :return:
        """
        self.__client.close()
        target_host_socket.close()
        logging.info("Client <> Target closed")
def test_load_balanced(self):
    """Exercise the load_balanced coordinating resource: seed the
    r_server_load_digest table directly via SQL and check that puts land on
    the least-loaded resource."""
    # =-=-=-=-=-=-=-
    # read server_config.json and .odbc.ini
    cfg = ServerConfig()

    # this test drives the load table with raw SQL, supported only on postgres
    if cfg.get('catalog_database_type') == "postgres":
        # =-=-=-=-=-=-=-
        # seed load table with fake values - rescA should win
        secs = int(time.time())
        cfg.exec_sql_cmd(
            "insert into r_server_load_digest values ('rescA', 50, %s)" % secs)
        cfg.exec_sql_cmd(
            "insert into r_server_load_digest values ('rescB', 75, %s)" % secs)
        cfg.exec_sql_cmd(
            "insert into r_server_load_digest values ('rescC', 95, %s)" % secs)

        # =-=-=-=-=-=-=-
        # build a logical path for putting a file
        test_file = self.admin.session_collection + "/test_file.txt"

        # =-=-=-=-=-=-=-
        # put a test_file.txt - should be on rescA given load table values
        self.admin.assert_icommand(
            "iput -f ./test_load_balanced_suite.py " + test_file)
        self.admin.assert_icommand("ils -L " + test_file, 'STDOUT_SINGLELINE', "rescA")
        self.admin.assert_icommand("irm -f " + test_file)

        # =-=-=-=-=-=-=-
        # drop rescC to a load of 15 - this should now win
        cfg.exec_sql_cmd(
            "update r_server_load_digest set load_factor=15 where resc_name='rescC'"
        )

        # =-=-=-=-=-=-=-
        # put a test_file.txt - should be on rescC given load table values
        self.admin.assert_icommand(
            "iput -f ./test_load_balanced_suite.py " + test_file)
        self.admin.assert_icommand("ils -L " + test_file, 'STDOUT_SINGLELINE', "rescC")
        self.admin.assert_icommand("irm -f " + test_file)

        # =-=-=-=-=-=-=-
        # clean up our alterations to the load table
        cfg.exec_sql_cmd(
            "delete from r_server_load_digest where resc_name='rescA'")
        cfg.exec_sql_cmd(
            "delete from r_server_load_digest where resc_name='rescB'")
        cfg.exec_sql_cmd(
            "delete from r_server_load_digest where resc_name='rescC'")
    else:
        print 'skipping test_load_balanced due to unsupported database for this test.'
def test_load_balanced(self):
    """Exercise the load_balanced coordinating resource: seed
    r_server_load_digest with fake load values and check that puts land on
    the least-loaded resource."""
    # =-=-=-=-=-=-=-
    # read server_config.json and .odbc.ini
    cfg = ServerConfig()

    # this test drives the load table with raw SQL, supported only on postgres
    if cfg.get('catalog_database_type') == "postgres":
        # =-=-=-=-=-=-=-
        # seed load table with fake values - rescA should win
        secs = int(time.time())
        cfg.exec_sql_cmd("insert into r_server_load_digest values ('rescA', 50, %s)" % secs)
        cfg.exec_sql_cmd("insert into r_server_load_digest values ('rescB', 75, %s)" % secs)
        cfg.exec_sql_cmd("insert into r_server_load_digest values ('rescC', 95, %s)" % secs)

        # =-=-=-=-=-=-=-
        # build a logical path for putting a file
        test_file = self.admin.session_collection + "/test_file.txt"

        # =-=-=-=-=-=-=-
        # put a test_file.txt - should be on rescA given load table values
        self.admin.assert_icommand("iput -f ./test_load_balanced_suite.py " + test_file)
        self.admin.assert_icommand("ils -L " + test_file, 'STDOUT', "rescA")
        self.admin.assert_icommand("irm -f " + test_file)

        # =-=-=-=-=-=-=-
        # drop rescC to a load of 15 - this should now win
        cfg.exec_sql_cmd("update r_server_load_digest set load_factor=15 where resc_name='rescC'")

        # =-=-=-=-=-=-=-
        # put a test_file.txt - should be on rescC given load table values
        self.admin.assert_icommand("iput -f ./test_load_balanced_suite.py " + test_file)
        self.admin.assert_icommand("ils -L " + test_file, 'STDOUT', "rescC")
        self.admin.assert_icommand("irm -f " + test_file)

        # =-=-=-=-=-=-=-
        # clean up our alterations to the load table
        cfg.exec_sql_cmd("delete from r_server_load_digest where resc_name='rescA'")
        cfg.exec_sql_cmd("delete from r_server_load_digest where resc_name='rescB'")
        cfg.exec_sql_cmd("delete from r_server_load_digest where resc_name='rescC'")
    else:
        print 'skipping test_load_balanced due to unsupported database for this test.'
def __init__(self, cache_storage: CacheStorage):
    """Create a request handler backed by the given cache.

    :param cache_storage: storage used for cached responses
    """
    self.__cache = cache_storage
    self.__config = ServerConfig()
class ProxyPasser:
    """Bidirectional TCP relay between a client socket and a fixed
    proxy-pass target, driven by select()."""

    __config = ServerConfig()

    def __init__(self, client_socket, target_socket=None):
        self.__client = client_socket
        # Allow injecting a pre-made target socket; otherwise create one.
        if target_socket is not None:
            self.__target = target_socket
        else:
            self.__target = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.__target_host = self.__config.proxy_pass_host
        self.__target_port = self.__config.proxy_pass_port

    def run(self):
        """
        Init connection client <> target
        :return:
        """
        logging.info(f"Client <> Target opened")
        self.__client.setblocking(False)
        target_host_socket = self.__target
        target_host_socket.connect((self.__target_host, self.__target_port))
        target_host_socket.setblocking(False)

        # Bytes queued for delivery in each direction.
        client_data = bytearray()
        target_data = bytearray()
        while True:
            inputs = [self.__client, target_host_socket]
            outputs = []
            # Only ask select() for writability where we have pending bytes.
            if len(client_data) > 0:
                outputs.append(self.__client)
            if len(target_data) > 0:
                outputs.append(target_host_socket)
            try:
                in_rdy, out_rdy, _ = select(inputs, outputs, [], 1.0)
            except Exception as e:
                logging.error(e)
                logging.error(traceback.format_exc())
                break
            data = bytearray()
            for connection in in_rdy:
                try:
                    data = connection.recv(4096)
                except Exception as e:
                    # NOTE(review): on recv failure `data` keeps its previous
                    # value and may be routed again below — confirm intended.
                    logging.error(e)
                if data is not None:
                    if len(data) > 0:
                        # Route bytes to the opposite peer's outgoing queue.
                        if connection == self.__client:
                            target_data += data
                        else:
                            client_data += data
                    else:
                        # Empty read: the peer closed; tear everything down.
                        self.__close_conn(target_host_socket)
                        return
            for connection in out_rdy:
                # Partial sends are handled by trimming what was written.
                if connection == self.__client and len(client_data) > 0:
                    bytes_written = self.__client.send(client_data)
                    if bytes_written > 0:
                        client_data = client_data[bytes_written:]
                elif connection == target_host_socket and len(target_data) > 0:
                    bytes_written = target_host_socket.send(target_data)
                    if bytes_written > 0:
                        target_data = target_data[bytes_written:]

    def __close_conn(self, target_host_socket):
        """
        Close target and client connections
        :param target_host_socket:
        :return:
        """
        self.__client.close()
        target_host_socket.close()
        logging.info("Client <> Target closed")
class App:
    """Doorbell application: wires logging, config, the audio mixer, GPIO
    and the websocket server together."""

    logger = None
    server_config = None
    # Websocket message-type identifiers shared with connected clients.
    message_types = {
        "request_volume": "volume:request",
        "receive_volume": "volume:receive",
        "update_volume": "volume:update",
        "receive_bell": "bell:receive",
    }
    cleaning_up = False
    future = None

    def __init__(self, debug=False):
        self.debug = debug
        self.init_logger()
        self.load_config()
        self.init_mixer()
        self.init_gpio()

    def init_logger(self):
        """Configure logging: colored console output plus an app.log file."""
        logging.config.dictConfig({
            "version": 1,
            "disable_existing_loggers": not self.debug,
            "formatters": {
                "console": {
                    "format": "\033[1;31m%(levelname)s\033[1;0m %(asctime)s %(module)s %(process)d %(thread)d %(message)s"
                },
                "verbose": {
                    "format": "%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s"
                },
                "simple": {
                    "format": "%(levelname)s %(message)s"
                },
            },
            "handlers": {
                "console": {
                    "class": "logging.StreamHandler",
                    "formatter": "console",
                },
                "file": {
                    "class": "logging.FileHandler",
                    "filename": "app.log",
                    "formatter": "verbose",
                },
            },
            "loggers": {
                "": {
                    "handlers": ["console", "file"],
                    "level": "DEBUG",
                    "propagate": True,
                },
            },
        })
        self.logger = logging.getLogger('doorbell')

    def load_config(self):
        """Build ServerConfig from every public attribute of local_config
        and log the sanitized result."""
        config_kwargs = {
            name: getattr(local_config, name)
            for name in dir(local_config)
            if not name.startswith("_")
        }
        self.server_config = ServerConfig(debug=self.debug, **config_kwargs)
        self.logger.info(self.server_config.sanitized_dict())

    def init_mixer(self):
        """Load the bell audio file at a fixed default volume."""
        pygame.mixer.init()
        pygame.mixer.music.load(self.server_config.audio_file)
        pygame.mixer.music.set_volume(0.6)

    def init_gpio(self):
        """Register the falling-edge handler for the doorbell button (only
        when GPIO is available on this machine)."""
        if GPIO:
            GPIO.setmode(GPIO.BCM)
            BUTTON = 21
            GPIO.setup(BUTTON, GPIO.IN)
            # See https://sourceforge.net/p/raspberry-gpio-python/wiki/Inputs/
            GPIO.add_event_detect(
                BUTTON,
                GPIO.FALLING,
                callback=self.handle_ring,
                bouncetime=10000,
            )

    def handle_ring(self, channel=None):
        """Bell pressed: notify websocket clients, play the sound and send
        notifications, subject to do-not-disturb / volume rules."""
        self.logger.info('bell is ringing!')
        for client in tornado_server.connections:
            client.write_message({'type': self.message_types['receive_bell']})
        if self._should_play_sound():
            pygame.mixer.music.play()
        else:
            self.logger.debug(
                'Not playing sound because in do-not-disturb-mode.')
        if self._should_send_notification():
            notifications.send(self.server_config.notifications)
        else:
            self.logger.debug(
                'Not sending notifications because volume is too high.')

    def start(self):
        """Run the web server and GPIO loop until cancelled."""
        loop = asyncio.get_event_loop()
        self.future = asyncio.gather(
            self.start_web_server(),
            self.gpio_loop(),
        )
        self.cleaning_up = False
        signal.signal(signal.SIGINT, self.cleanup)
        signal.signal(signal.SIGTERM, self.cleanup)
        try:
            loop.run_until_complete(self.future)
        except CancelledError as e:
            if not self.cleaning_up:
                # NOTE(review): this references a module-level `logger`, not
                # self.logger — confirm one exists or this raises NameError.
                logger.error(str(e))
        finally:
            loop.close()

    async def gpio_loop(self):
        """Debug stand-in for the hardware button: typing 'r' + Enter on
        stdin fakes a bell ring."""
        timeout_sec = 1
        while True:
            if self.debug:
                # See https://stackoverflow.com/a/3471853/6928824
                ready_to_read, _, _ = select([sys.stdin], [], [], timeout_sec)
                if ready_to_read:
                    user_input = sys.stdin.read(2)
                    if user_input == "r\n":
                        print("fake bell!")
                        self.handle_ring()
            await asyncio.sleep(0.200)

    async def start_web_server(self):
        # Delegates entirely to the tornado server module.
        tornado_server.start(self.server_config, self.message_types)

    def cleanup(self, sig, frame):
        """ Cleanup handler, e.g. for when Ctrl+C is pressed. """
        self.cleaning_up = True
        pygame.mixer.quit()
        if GPIO:
            GPIO.cleanup()
        # 'stop' only affects 'loop.run_forever' but not
        # 'loop.run_until_complete'. Thus we just cancel the Future object.
        # See https://docs.python.org/3.5/library/asyncio-eventloop.html#asyncio.AbstractEventLoop.stop
        self.future.cancel()
        sys.exit(0)

    ###########################################################################
    # PRIVATE

    def _should_play_sound(self):
        # Suppressed while do-not-disturb is active.
        return not utils.do_not_disturb_now(self.server_config)

    def _should_send_notification(self):
        # Only notify when the local bell volume is at or below the threshold.
        return (pygame.mixer.music.get_volume() <=
                self.server_config.notifications['max_volume'])
class Server():
    """Network node: keeps a set of known peer servers and passes Tokens
    around the ring, collecting each server's response for a URL."""

    def __init__(self, signature, start_server=None, timeout_limit = 2, debug_mode = False):
        """Server initialization. Requires the server's signature (a
        ServerSignature instance). Optionally an external-server instance may
        be given to join the network it belongs to; when no server is given,
        a new network is started. The optional timeout_limit (default 2) is
        how many seconds to wait for a request before deciding that the
        server it was sent to is down."""
        self.server_config = ServerConfig(signature, debug_mode = debug_mode)
        self.timeout_limit = timeout_limit
        self._server_list = set()  # known external servers
        # Receive loop runs on its own thread and calls back into _msg_recv.
        thr = self.server_config.server_thread(self._msg_recv)
        thr.start()
        self.server_thread = thr
        self.token_thread_list = []
        self.expecting = False  # which reply type is currently awaited
        if start_server:
            # Joining an existing network: request its server list.
            self.expecting = 'serverList'
            start_server.get_server_list(self.signature)

    @property
    def signature(self):
        """This server's ServerSignature (delegated to the config)."""
        return self.server_config.signature

    @property
    def debug(self):
        """Debug flag (delegated to the config)."""
        return self.server_config.debug

    def _append_server(self, server):
        """Internal helper to add a new server to the known-servers set."""
        if (self.signature == server.server_config.signature):
            # Never add ourselves.
            return
        server.debug = self.debug
        self._server_list.add(server)

    def _next_server(self, ignore=[]):
        """Internal helper returning the next known server. An optional
        *ignore* list skips servers that failed to answer earlier. Prefers
        the smallest config-hash greater than ours; otherwise wraps around.
        Returns False when no server is available at all.

        NOTE(review): mutable default argument — safe only while no caller
        mutates the default list. Also, the wrap-around branch returns
        sorted_list[0] (not the non-ignored `server` it just found), so an
        ignored first element could still be returned — confirm intended."""
        sort_method = lambda se: hash(se.server_config)
        sorted_list = sorted(self._server_list, key=sort_method)
        for server in sorted_list:
            if server in ignore:
                continue
            if hash(server.server_config) > hash(self.server_config):
                return server
        for server in sorted_list:
            if server in ignore:
                continue
            return sorted_list[0]
        return False

    def _say_hello(self):
        """Internal helper broadcasting a hello to all known servers."""
        for server in self._server_list:
            server.say_hello(self.signature)

    def _recv_server_list(self, str_msg):
        """Internal handler for an incoming server list. Expects a string
        that evaluates to an iterable of (IP, port) tuples, e.g.
        "[(\\"localhost\\", 5050)]".

        SECURITY NOTE(review): eval() on network data can execute arbitrary
        code supplied by a peer."""
        exteral_server_list = eval(str_msg)
        for server in exteral_server_list:
            self._append_server(server)
        self._say_hello()
        return True

    def _recv_hello(self, conf):
        """Internal handler for a hello from another server."""
        ext_server = ExternalServer(conf)
        self._append_server(ext_server)
        return True

    def _recv_server_list_request(self, conf):
        """Send the list of known servers back to the requester."""
        conf.send(self.signature, repr(self.server_array()))
        return True

    def _conclude_token(self, token):
        """Report a completed token (every server answered) to stdout when
        in debug mode."""
        if self.debug:
            print('--------------------------------------------------------------------------------')
            print('> Recebida resposta')
            print()
            print('> ' + token.url)
            print()
            for (server, response) in token.stack.items():
                print('    > ' + server + ' - ' + response)
            print('--------------------------------------------------------------------------------')
        return True

    def _request_url(self, url):
        """Perform the request to the URL and return either:
        - the status code (200, 400, 404, 500), or
        - "Error, it is not possible to reach the informed url"."""
        try:
            response = requests.get(url)
            return response.status_code
        except:
            return "Error, it is not possible to reach the informed url"

    def _proccess_token(self, token):
        """Process the token locally: check access to the URL through
        _request_url and append this server's result to the token."""
        url = token.url
        response = self._request_url(url)
        token.append_server(str(self.signature), str(response))

    def _wait(self, validation=None, limit=None):
        """Local sleep implementation. *validation* may be a callable checked
        periodically to stop the sleep early when it returns true; *limit* is
        a number of seconds, defaulting to the instance timeout.

        NOTE(review): the sleep below divides self.timeout_limit rather than
        the computed *limit*, so a custom limit is effectively ignored —
        confirm."""
        if not limit:
            limit = self.timeout_limit
        if not validation:
            validation = lambda: False
        for step in range(100000):
            time.sleep(self.timeout_limit/100000)
            if validation():
                break

    def _send_token(self, token):
        """Send the token to the next server, confirming receipt and trying
        subsequent servers when one stops answering. Waits up to the
        configured timeout for each confirmation."""
        ignore = []
        # Skip servers that already answered this token.
        for server in self._server_list:
            if server.server_config.signature in token.server_stack:
                ignore.append(server)
        while True:
            server = self._next_server(ignore)
            if not server:
                # No one left to forward to: the round is complete.
                return self._conclude_token(token)
            self.expecting = 'confirm_token'
            server.send_token(self.signature, token)
            self._wait(lambda: self.expecting != 'confirm_token' or not self.timeout_limit)
            if self.expecting == 'confirm_token':
                # No confirmation arrived: mark the server dead and retry.
                ignore.append(server)
                token.append_server(str(server.server_config.signature), 'Error, server not responding')
                continue
            break

    def _recv_token(self, conf, token):
        """Handle receipt of a token. Confirms receipt to the sender and
        checks whether this server is already in the token's stack: if so,
        the token completed the round and is concluded; otherwise it is
        verified locally and forwarded to the next server. Forwarding runs
        on a separate thread so the receive socket stays free to accept the
        next server's confirmation."""
        conf.send(self.signature, 'confirm_token')
        if str(self.signature) in token.server_stack:
            return self._conclude_token(token)
        self._proccess_token(token)
        thr = threading.Thread(target=self._send_token,args=[token])
        thr.start()
        self.token_thread_list.append(thr)
        return True

    def _recv_confirm_token(self):
        """Handle a token-receipt confirmation: just clear the expected-reply
        flag."""
        self.expecting = False
        return True

    def _msg_recv(self, bts_msg, orig_tup):
        """Internal message-receive callback, run on the dedicated listener
        thread. Receives the message byte string and the (IP, port) tuple of
        the sender. Returning a falsy value stops the receive thread.

        Handles messages:
        - get_server_list
        - hello
        - halt
        - confirm_token (when a confirmation is awaited)
        - server_list (when the list is awaited)
        - token (when no earlier option consumed the message and its
          evaluation yields a Token, the token is processed)."""
        str_msg = self.server_config.decode(bts_msg)
        sig = ServerSignature(*orig_tup, True)
        conf = ServerConfig(sig)
        if str_msg == 'get_server_list':
            return self._recv_server_list_request(conf)
        if str_msg == 'hello':
            return self._recv_hello(conf)
        if str_msg == 'halt':
            # Falsy return value shuts the listener thread down.
            return False
        if str_msg == 'confirm_token':
            if self.expecting == 'confirm_token':
                return self._recv_confirm_token()
        if self.expecting == 'serverList':
            try:
                result = self._recv_server_list(str_msg)
                self.expecting = False
                return result
            except:
                # Not a parsable server list; fall through to token handling.
                pass
        try:
            # SECURITY NOTE(review): eval() on network data is dangerous —
            # any peer can execute arbitrary code here.
            token = eval(str_msg)
            if isinstance(token, Token):
                return self._recv_token(conf, token)
            # NOTE(review): a successful eval of a non-Token falls through
            # and returns None (falsy), stopping the receive thread — confirm.
        except:
            return True

    def send_token(self, url):
        """Create a token for the given URL, verify it locally and send it
        to the next server."""
        token = Token(url)
        self._proccess_token(token)
        return self._send_token(token)

    def halt(self):
        """Stop the receive-socket loop."""
        self.server_config.send(self.signature, 'halt')
        self.timeout_limit = 0

    def server_array(self):
        """Return the set of known servers, including this one, as
        ExternalServer entries (each exposing its IP/port tuple)."""
        external_self = ExternalServer(self.server_config)
        return self._server_list | {external_self}
class ErrorPage:
    """Singleton that renders an error page for a given HTTP status code.

    The page template is read once from ``__config.error_page_loc``; the
    placeholders ``{{CODE}}``, ``{{SHORT_DESC}}`` and ``{{LONG_DESC}}`` are
    substituted per status code by :meth:`get_page_content`.
    """

    # HTTP status code -> (short description, long description).
    # NOTE: the original list-of-dicts used implicit string concatenation
    # with no separating spaces, producing garbled text such as
    # "was notfound." -- the descriptions below restore the spaces.
    __errors = {
        400: ("Bad Request",
              "Syntax of the request not understood by the server."),
        401: ("Not Authorized",
              "Request requires user authentication"),
        402: ("Payment Required",
              "Reserved for future use."),
        403: ("Forbidden",
              "Server refuses to fulfill the request."),
        404: ("Not Found",
              "Document or file requested by the client was not found."),
        405: ("Method Not Allowed",
              "Method specified in the Request-Line was not allowed for "
              "the specified resource."),
        406: ("Not Acceptable",
              "Resource requested generates response entities that has "
              "content characteristics not specified in the accept "
              "headers."),
        407: ("Proxy Authentication Required",
              "Request requires the authentication with the proxy."),
        408: ("Request Timeout",
              "Client fails to send a request in the time allowed by the "
              "server."),
        409: ("Conflict",
              "Request was unsuccessful due to a conflict in the state of "
              "the resource."),
        410: ("Gone",
              "Resource requested is no longer available with no "
              "forwarding address"),
        411: ("Length Required",
              "Server doesn’t accept the request without a valid "
              "Content-Length header field."),
        412: ("Precondition Failed",
              "Precondition specified in the Request-Header field returns "
              "false."),
        413: ("Request Entity Too Large",
              "Request unsuccessful as the request entity is larger than "
              "that allowed by the server"),
        414: ("Request URL Too Long",
              "Request unsuccessful as the URL specified is longer than "
              "the one, the server is willing to process."),
        415: ("Unsupported Media Type",
              "Request unsuccessful as the entity of the request is in a "
              "format not supported by the requested resource"),
        416: ("Requested Range Not Satisfiable",
              "Request included a Range request-header field without any "
              "range-specifier value"),
        417: ("Expectation Failed",
              "Expectation given in the Expect request-header was not "
              "fulfilled by the server."),
        422: ("Unprocessable Entity",
              "Request well-formed but unable to process because of "
              "semantic errors"),
        423: ("Locked",
              "Resource accessed was locked"),
        424: ("Failed Dependency",
              "Request failed because of the failure of a previous "
              "request"),
        426: ("Upgrade Required",
              "Client should switch to Transport Layer Security"),
        500: ("Internal Server Error",
              "Request unsuccessful because of an unexpected condition "
              "encountered by the server."),
        501: ("Not Implemented",
              "Request unsuccessful as the server could not support the "
              "functionality needed to fulfill the request."),
        502: ("Bad Gateway",
              "Server received an invalid response from the upstream "
              "server while trying to fulfill the request."),
        503: ("Service Unavailable",
              "Request unsuccessful due to the server being down or "
              "overloaded."),
        504: ("Gateway Timeout",
              "Upstream server failed to send a request in the time "
              "allowed by the server."),
        505: ("HTTP Version Not Supported",
              "Server does not support the HTTP version specified in the "
              "request."),
    }

    __instance = None
    __config = ServerConfig()

    def __new__(cls, *args, **kwargs):
        # Classic singleton: create the instance once, then reuse it.
        # BUGFIX: the original tested hasattr(cls, 'instance'), which never
        # matched the name-mangled __instance attribute, so every call built
        # a brand-new object and the singleton was never actually cached.
        if cls.__instance is None:
            cls.__instance = super(ErrorPage, cls).__new__(cls)
        return cls.__instance

    def __init__(self):
        # __init__ runs on every ErrorPage() call even though __new__
        # returns the shared instance; skip re-reading the template after
        # the first initialisation.
        if getattr(self, '_initialized', False):
            return
        self._initialized = True
        page_loc = Path.cwd() / self.__config.error_page_loc
        if page_loc.is_file():
            with open(page_loc, 'rb') as output:
                self.error_page = output.read().decode("utf-8")
        else:
            logging.error("Error page was not found. Using default.")
            self.error_page = "{{CODE}} {{SHORT_DESC}} {{LONG_DESC}}"

    @lru_cache()
    def get_page_content(self, code):
        """
        Get error page content as string

        :param code: error code
        :raises ValueError: if ``code`` is not a known HTTP error code
            (previously this crashed with an unhelpful AttributeError)
        :return: rendered page content
        """
        try:
            short_desc, long_desc = self.__errors[code]
        except KeyError:
            raise ValueError("Unknown HTTP error code: %s" % code) from None
        result = self.error_page.replace("{{CODE}}", str(code))
        result = result.replace("{{SHORT_DESC}}", short_desc)
        return result.replace("{{LONG_DESC}}", long_desc)
class RequestHandler: __mime: MimeTypes = MimeTypes() __work_dir: Path = Path.cwd() __buff_size: int = 4096 __config: ServerConfig = ServerConfig() __file_cache: FileCache = FileCache() __error_page: ErrorPage = ErrorPage() def handle_get(self, request): """GET request handler. Returns file to client :type request: Request """ logging.info(f'Processing GET request {request.path}') path_to_file = self.__get_abs_path(request.path) file_descriptor = self.__file_cache.get_fd(path_to_file) if file_descriptor and path_to_file.is_file(): response = Response(code=200) response.add_header("Content-Length", str(path.getsize(path_to_file))) file_type, charset = self.__mime.guess_type(path_to_file.as_uri()) response.add_header("Content-Type", file_type) return self.__send_file_response( response=response, file_descriptor=file_descriptor), response else: bad_req_response = Response(code=404) return self.__send_general_response( bad_req_response), bad_req_response @staticmethod def __send_file_response(response, file_descriptor, buffer_size=__buff_size): """Return response func with file loaded from specified path :type buffer_size: int :type file_descriptor: int :type response: Response """ def result(client): request_data = response.get_bytes() client.sendall(request_data) with open(file_descriptor, 'rb', closefd=False) as output: while True: data = output.read(buffer_size) if not data: break client.sendall(data) output.seek(0) return result @staticmethod def __send_general_response(response): """Return simple response func :type response: Response """ def result(client): data = response.get_bytes() client.sendall(data) return result def __get_abs_path(self, req_path): """Helper to get absolute path to requested file :type req_path: str """ path_components = req_path.split("/") result = self.__work_dir / self.__config.www_dir for part in path_components: result = result / part return result
def __init__(self, config_name="config.json"): self.__config = ServerConfig(config_name) logging.basicConfig(filename=self.__config.log_file, level=logging.DEBUG, format='%(asctime)s %(message)s') self.thread_pool = ThreadPool()
from ..user_client_manage.user_id_manage import UserIDManage from client_manage.user_client_manage.user_control_log import UserControlLog import datetime import uuid from server_config import ServerConfig from threading import Thread import threading import time import time M_WEBSOCKET_RESPONSE = 'response' server_config = ServerConfig() class UserAccountManage(object): def __init__(self, mysql_obj, arg_join_room, arg_leave_room, arg_emit): self.mysql_obj = mysql_obj self.table_name = 'null' self.socket_io_join_room = arg_join_room self.socket_io_leave_room = arg_leave_room self.socket_io_emit = arg_emit self.user_id_manage = UserIDManage(mysql_obj, 'user_id_create') self.user_control_log = UserControlLog(mysql_obj) # 注册账号 def register_account(self, account, passwd, name):