def postApi(self, uri, params=None, json=None):
    r"""Send a ``POST`` request to the API server.

    Return ``response["data"]``.

    :param uri: URI to request (appended to ``WEBAPI_URL/mod_mu/``)
    :param params: Optional query arguments ``request`` takes
    :param json: Optional ``json`` body that ``request`` takes
    :rtype: list
    :raises Exception: on non-200 status or malformed payload
    """
    # BUG FIX: the original used mutable default arguments and then
    # mutated `params`, so the API key leaked into the shared default
    # dict and into callers' dicts. Copy instead.
    params = dict(params) if params else {}
    json = json if json is not None else {}
    params["key"] = get_config().WEBAPI_TOKEN
    response = self.session_pool.post(
        "%s/mod_mu/%s" % (get_config().WEBAPI_URL, uri),
        params=params,
        json=json,
        timeout=10,
    )
    if response.status_code != 200:
        logging.error("Server error with status code: %i" % response.status_code)
        raise Exception('Server Error!')
    try:
        json_data = response.json()
    except ValueError:
        # Narrowed from a bare `except:` — only JSON decode errors are
        # expected here (json.JSONDecodeError subclasses ValueError).
        logging.error("Wrong data: %s" % response.text)
        raise Exception('Server Error!')
    if len(json_data) != 2:
        logging.error("Wrong data: %s" % response.text)
        raise Exception('Server Error!')
    if json_data["ret"] == 0:
        logging.error("Wrong data: %s" % json_data["data"])
        raise Exception('Server Error!')
    return json_data["data"]
def thread_db(obj):
    """Speedtest worker loop: run ``speedtest_thread()`` every SPEEDTEST hours.

    Does nothing when SPEEDTEST is 0. Stops when the instance's event is
    set or ``has_stopped`` becomes truthy.
    """
    if configloader.get_config().SPEEDTEST == 0:
        return
    if configloader.get_config().API_INTERFACE == 'modwebapi':
        import webapi_utils
        global webapi
        webapi = webapi_utils.WebApi()
    global db_instance
    db_instance = obj()
    try:
        while True:
            try:
                db_instance.speedtest_thread()
            except Exception:
                import traceback
                logging.error(traceback.format_exc())
            # Sleep SPEEDTEST hours; a set event means "stop now".
            interval = configloader.get_config().SPEEDTEST * 3600
            if db_instance.event.wait(interval):
                break
            if db_instance.has_stopped:
                break
    except KeyboardInterrupt:
        pass
    db_instance = None
def getApi(self, uri, params={}):
    """GET ``<WEBAPI_URL>/mod_mu/<uri>`` and return its ``data`` field.

    Returns ``[]`` when the payload is not JSON or ``ret`` is 0; raises
    on any transport-level failure.
    """
    response = None
    try:
        query = dict(params)
        query['key'] = get_config().WEBAPI_TOKEN
        url = '%s/mod_mu/%s' % (get_config().WEBAPI_URL, uri)
        response = self.session_pool.get(url, params=query, timeout=10)
        try:
            payload = response.json()
        except Exception:
            if response:
                logging.error("Error data:%s" % (response.text))
            return []
        if payload['ret'] == 0:
            logging.error("Error data:%s" % (response.text))
            logging.error("request %s error!wrong ret!" % (uri))
            return []
        return payload['data']
    except Exception:
        import traceback
        logging.error(traceback.format_exc())
        raise Exception('network issue or server error!')
def thread_db(obj):
    """Auto-block worker loop (Linux only): run ``auto_block_thread()`` once a minute.

    Does nothing unless CLOUDSAFE is enabled and the platform is Linux.
    """
    if (configloader.get_config().CLOUDSAFE == 0
            or platform.system() != "Linux"):
        return
    if configloader.get_config().API_INTERFACE == "modwebapi":
        import webapi_utils
        global webapi
        webapi = webapi_utils.WebApi()
    global db_instance
    db_instance = obj()
    try:
        while True:
            try:
                db_instance.auto_block_thread()
            except Exception:
                import traceback
                logging.error(traceback.format_exc())
            # Wake every 60s; a set event means "stop".
            if db_instance.event.wait(60):
                break
            if db_instance.has_stopped:
                break
    except KeyboardInterrupt:
        pass
    db_instance = None
def _send_req(self, hostname, qtype):
    """Build a DNS query for *hostname*/*qtype* and send it upstream.

    Media-unlock hostnames (Netflix / HBO / Hulu / BBC) are routed to
    their dedicated resolvers when configured; everything else goes to
    the normal upstream servers.
    """
    req = build_request(hostname, qtype)
    # PERF: hoisted out of the server loop — the config lookups, the
    # str() conversion and the substring matching are invariant across
    # servers (the original recomputed them per server).
    host = str(hostname)
    special_targets = []
    netflix_dns = get_config().NETFLIX_DNS
    if netflix_dns != 'empty' and ("netflix" in host or "nflx" in host):
        special_targets.append((netflix_dns, 53))
    hbo_dns = get_config().HBO_DNS
    if hbo_dns != 'empty' and (
            "hbo" in host
            or "execute-api.ap-southeast-1.amazonaws.com" == host):
        special_targets.append((hbo_dns, 53))
    hulu_dns = get_config().HULU_DNS
    if hulu_dns != 'empty' and ("hulu" in host or "happyon.jp" == host):
        special_targets.append((hulu_dns, 53))
    bbc_dns = get_config().BBC_DNS
    if bbc_dns != 'empty' and ("bbc" in host or "co.uk" in host
                               or "uk-live" in host):
        special_targets.append((bbc_dns, 53))
    for server in self._servers:
        logging.debug('resolving %s with type %d using server %s',
                      hostname, qtype, server)
        if special_targets:
            # NOTE: matches the original behavior exactly — each matching
            # special resolver is queried once per configured upstream.
            for target in special_targets:
                self._sock.sendto(req, target)
        else:
            self._sock.sendto(req, server)
def run_command(self, command, id):
    """Execute *command* and report its output to the panel.

    :param command: command line, split on spaces (no shell is involved)
    :param id: the autoexec command id being executed
    """
    value = subprocess.check_output(command.split(" ")).decode("utf-8")
    if configloader.get_config().API_INTERFACE == "modwebapi":
        global webapi
        webapi.postApi(
            "func/autoexec",
            {"node_id": configloader.get_config().NODE_ID},
            {
                "data": [{
                    "value": "NodeID:" +
                    str(configloader.get_config().NODE_ID) +
                    # BUG FIX: report the command id here — the original
                    # repeated NODE_ID after "Exec Command ID:".
                    " Exec Command ID:" + str(id) +
                    " Result:\n" + str(value),
                    "sign": str(value),
                    "type": 2,
                }]
            },
        )
    else:
        mysqlObj = MySqlWrapper()
        mysqlObj.write_running_command(value)
        del mysqlObj
def __init__(self):
    """Select user-table columns and the node-info table for the configured panel API."""
    super(Dbv3Transfer, self).__init__()
    # legendsockssr panels do not report node state; every other backend does.
    self.update_node_state = True if get_config(
    ).API_INTERFACE != 'legendsockssr' else False
    if self.update_node_state:
        self.key_list += ['id']
    self.key_list += ['method']
    if self.update_node_state:
        self.ss_node_info_name = 'ss_node_info_log'
        # NOTE(review): from_user appears redacted ("******") in this copy
        # of the source — confirm the real value upstream.
        self.from_user = "******"
        if get_config().API_INTERFACE == 'ssrslave':
            self.key_list += [
                'obfs', 'protocol', 'protocol_param', 'speed_limit_per_user'
            ]
            self.from_user = "******" + str(
                self.cfg["node_id"])
        if get_config().API_INTERFACE == 'sspanelv3ssr':
            self.key_list += ['obfs', 'protocol']
        if get_config().API_INTERFACE == 'glzjinmod':
            self.key_list += ['obfs', 'protocol']
            # glzjinmod writes to the live table rather than the log table.
            self.ss_node_info_name = 'ss_node_info'
    else:
        self.key_list += ['obfs', 'protocol']
    # Used later to compute node uptime.
    self.start_time = time.time()
def postApi(self, uri, params={}, raw_data={}):
    """POST *raw_data* as JSON to ``<WEBAPI_URL>/mod_mu/<uri>``.

    Returns the payload's ``data`` field; raises on transport errors,
    non-JSON responses, or ``ret == 0``.
    """
    response = None
    try:
        query = dict(params)
        query["key"] = get_config().WEBAPI_TOKEN
        url = "%s/mod_mu/%s" % (get_config().WEBAPI_URL, uri)
        response = self.session_pool.post(
            url, params=query, json=raw_data, timeout=10)
        try:
            payload = response.json()
        except Exception:
            if response:
                logging.error("Error data:%s" % (response.text))
            raise Exception("error data!")
        if payload["ret"] == 0:
            logging.error("Error data:%s" % (response.text))
            logging.error("request %s error!wrong ret!" % (uri))
            raise Exception("wrong ret!")
        return payload["data"]
    except Exception:
        import traceback
        logging.error(traceback.format_exc())
        raise Exception("network issue or server error!")
def getKeys():
    """Return the user-table column names to fetch for the configured API backend."""
    key_list = ['id' , 'port' , 'u', 'd', 'transfer_enable', 'passwd', 'enable' ]
    if get_config().API_INTERFACE == 'sspanelv3':
        key_list += ['method']
    elif get_config().API_INTERFACE == 'sspanelv3ssr':
        key_list += ['method', 'obfs', 'protocol']
    elif get_config().API_INTERFACE == 'glzjinmod':
        # NOTE(review): 'id' is already in the base list, so glzjinmod
        # yields a duplicate 'id' column. Looks unintentional, but
        # positional consumers of this list may depend on it — confirm
        # before deduplicating.
        key_list += ['method', 'obfs','obfs_param','protocol','protocol_param','id','node_speedlimit','forbidden_ip','forbidden_port','disconnect_ip','is_multi_user']
    return key_list
def del_server(self, port):
    """Ask the manager daemon (via its UDP control socket) to delete *port*.

    Best-effort: network errors are logged and swallowed; always
    returns True, matching the original contract.
    """
    port = int(port)
    logging.info("del server at %d" % port)
    try:
        udpsock = socket(AF_INET, SOCK_DGRAM)
        try:
            # BUG FIX: socket.sendto() requires a bytes-like object on
            # Python 3; the original passed a str, which always raised a
            # TypeError that was silently logged below.
            msg = ('%s:%s:0:0' % (get_config().MANAGE_PASS, port)).encode('utf-8')
            udpsock.sendto(msg, (get_config().MANAGE_BIND_IP,
                                 get_config().MANAGE_PORT))
        finally:
            # Close even if sendto raises (the original leaked the fd).
            udpsock.close()
    except Exception as e:
        logging.warning(e)
    return True
def __init__(self):
    """Configure user-table columns and the node-info table for this panel flavour."""
    super(Dbv3Transfer, self).__init__()
    self.key_list += ['id', 'method']
    self.ss_node_info_name = 'ss_node_info_log'
    if get_config().API_INTERFACE == 'sspanelv3ssr':
        self.key_list += ['obfs', 'protocol']
    if get_config().API_INTERFACE == 'glzjinmod':
        self.key_list += ['obfs', 'protocol']
        # glzjinmod uses the live table, not the log table.
        self.ss_node_info_name = 'ss_node_info'
    # Used later for uptime reporting.
    self.start_time = time.time()
def __init__(self):
    """Load the mudb json path and determine the public server address."""
    self.config_path = get_config().MUDB_FILE
    try:
        self.server_addr = get_config().SERVER_PUB_ADDR
    except Exception:
        # Narrowed from a bare `except:` (which also swallowed
        # KeyboardInterrupt/SystemExit); a missing config attribute
        # falls back to the loopback placeholder.
        self.server_addr = '127.0.0.1'
    self.data = MuJsonLoader()
    # The placeholder means "not configured" — auto-detect instead.
    if self.server_addr == '127.0.0.1':
        self.server_addr = self.getipaddr()
def run_command(self, command, id):
    """Execute *command* and report its output to the panel.

    :param command: command line, split on spaces (no shell is involved)
    :param id: the autoexec command id being executed
    """
    value = subprocess.check_output(command.split(' ')).decode('utf-8')
    if configloader.get_config().API_INTERFACE == 'modwebapi':
        global webapi
        webapi.postApi('func/autoexec',
                       {'node_id': configloader.get_config().NODE_ID},
                       {'data': [{'value': 'NodeID:' +
                                  str(configloader.get_config().NODE_ID) +
                                  # BUG FIX: report the command id — the
                                  # original repeated NODE_ID here.
                                  ' Exec Command ID:' + str(id) +
                                  " Result:\n" + str(value),
                                  'sign': str(value),
                                  'type': 2}]})
    else:
        import cymysql
        conn = cymysql.connect(
            host=configloader.get_config().MYSQL_HOST,
            port=configloader.get_config().MYSQL_PORT,
            user=configloader.get_config().MYSQL_USER,
            passwd=configloader.get_config().MYSQL_PASS,
            db=configloader.get_config().MYSQL_DB,
            charset='utf8')
        conn.autocommit(True)
        cur = conn.cursor()
        # SECURITY: parameterized — the original spliced arbitrary
        # command output straight into the SQL string.
        cur.execute(
            "INSERT INTO `auto` (`id`, `value`, `sign`, `datetime`,`type`) "
            "VALUES (NULL, %s, 'NOT', unix_timestamp(),'2')",
            ('NodeID:' + str(configloader.get_config().NODE_ID) +
             " Result:\n" + str(value),))
        cur.fetchall()
        cur.close()
        conn.close()
def update_all_user(self, dt_transfer):
    """Push traffic, node info, alive IPs and detect logs to the web API.

    :param dt_transfer: mapping of port -> (upload_bytes, download_bytes)
    :return: the subset of dt_transfer actually reported (nonzero traffic)
    """
    global webapi
    update_transfer = {}
    # (removed: `alive_user_count` and `bandwidth_thistime` were
    # initialized but never used in the original)
    data = []
    for port in dt_transfer:
        stat = dt_transfer[port]
        if stat[0] == 0 and stat[1] == 0:
            continue
        data.append({
            "u": stat[0],
            "d": stat[1],
            "user_id": self.port_uid_table[port],
        })
        update_transfer[port] = stat
    webapi.postApi("users/traffic", {"node_id": get_config().NODE_ID},
                   {"data": data})
    # Node heartbeat: uptime and load average.
    webapi.postApi(
        "nodes/%d/info" % (get_config().NODE_ID),
        {"node_id": get_config().NODE_ID},
        {"uptime": str(self.uptime()), "load": str(self.load())},
    )
    # Alive-IP report, flattened to one record per (ip, user).
    online_iplist = ServerPool.get_instance().get_servers_iplist()
    data = []
    for port, ips in online_iplist.items():
        for ip in ips:
            data.append({"ip": ip, "user_id": self.port_uid_table[port]})
    webapi.postApi("users/aliveip", {"node_id": get_config().NODE_ID},
                   {"data": data})
    # Detect-rule hit report.
    detect_log_list = ServerPool.get_instance().get_servers_detect_log()
    data = []
    for port, rule_ids in detect_log_list.items():
        for rule_id in rule_ids:
            data.append({
                "list_id": rule_id,
                "user_id": self.port_uid_table[port],
            })
    webapi.postApi(
        "users/detectlog",
        {"node_id": get_config().NODE_ID},
        {"data": data},
    )
    return update_transfer
def getKeys():
    """Column names to pull from the user table, depending on the panel API."""
    keys = ['id', 'port', 'u', 'd', 'transfer_enable', 'enable']
    api = get_config().API_INTERFACE
    if api == 'sspanelv3':
        keys += ['method']
    elif api == 'sspanelv3ssr':
        keys += ['method', 'obfs', 'protocol']
    elif api == 'glzjinmod':
        keys += ['node_speedlimit', 'forbidden_ip', 'forbidden_port',
                 'disconnect_ip', 'is_multi_user']
    return keys
def get_mu_host(id, md5):
    """Expand MU_REGEX placeholders into a multi-user hostname.

    ``%id`` -> the user id, ``%suffix`` -> MU_SUFFIX, and ``%<n>m`` ->
    the first n characters of *md5* (the last |n| when n is negative).
    """
    host = get_config().MU_REGEX
    host = host.replace('%id', str(id))
    host = host.replace('%suffix', get_config().MU_SUFFIX)
    for token in re.findall(r'%-?[1-9]\d*m', host):
        length = int(token[1:-1])  # strip the leading '%' and trailing 'm'
        fragment = md5[32 + length:] if length < 0 else md5[:length]
        host = host.replace(token, fragment)
    return host
def add_to_loop(self, loop):
    """Bind the manager's UDP control socket and register it on *loop*."""
    if self._loop:
        raise Exception('already add to loop')
    self._loop = loop
    # TODO when dns server is IPv6
    try:
        self._sock = socket.socket(
            socket.AF_INET, socket.SOCK_DGRAM, socket.SOL_UDP)
        bind_addr = (get_config().MANAGE_BIND_IP, get_config().MANAGE_PORT)
        self._sock.bind(bind_addr)
        self._sock.setblocking(False)
        loop.add(self._sock, eventloop.POLL_IN, self)
    except Exception as e:
        # Startup failure is logged, not raised, so the caller keeps going.
        logging.error('\n\nServerMgr Start Error %s \n\n' % e)
def __init__(self):
    """Select columns and the node-info table based on the panel API type."""
    super(Dbv3Transfer, self).__init__()
    api = get_config().API_INTERFACE
    # legendsockssr is the only backend that skips node-state reporting.
    self.update_node_state = api != 'legendsockssr'
    if self.update_node_state:
        self.key_list += ['id']
    self.key_list += ['method']
    if self.update_node_state:
        self.ss_node_info_name = 'ss_node_info_log'
        if api == 'sspanelv3ssr':
            self.key_list += ['obfs', 'protocol']
        if api == 'glzjinmod':
            self.key_list += ['obfs', 'protocol']
            self.ss_node_info_name = 'ss_node_info'
    else:
        self.key_list += ['obfs', 'protocol']
    self.start_time = time.time()
def update_all_user(self, dt_transfer):
    """Report traffic, alive IPs and the online-user count to the web API.

    :param dt_transfer: mapping of port -> (upload_bytes, download_bytes)
    :return: the subset of dt_transfer that carried nonzero traffic
    """
    global webapi
    node_id = get_config().NODE_ID
    update_transfer = {}
    # Per-user traffic report (skip idle users).
    data = []
    for port, stat in dt_transfer.items():
        if stat[0] == 0 and stat[1] == 0:
            continue
        data.append({'u': stat[0], 'd': stat[1],
                     'user_id': self.port_uid_table[port]})
        update_transfer[port] = stat
    if data:
        traffic_data = {'node_id': node_id, 'data': data}
        webapi.postApi('/traffic/upload', traffic_data)
    # Per-user alive-IP report, keyed by user id.
    ip_data = {}
    for port, ips in ServerPool.get_instance().get_servers_ip_list().items():
        ip_data[self.port_uid_table[port]] = ips
    webapi.postApi('/nodes/aliveip', {'node_id': node_id, 'data': ip_data})
    # Online-user count report.
    online_data = {'node_id': node_id,
                   'online_user': len(self.onlineuser_cache)}
    webapi.postApi('/nodes/online', online_data)
    return update_transfer
def __init__(self):
    """Initialize transfer-state bookkeeping and the per-instance logger."""
    import threading
    # Set to request the worker loop to stop / wake early.
    self.event = threading.Event()
    self.key_list = [
        'port', 'u', 'd', 'transfer_enable', 'passwd', 'enable'
    ]
    self.last_get_transfer = {}  # last actual traffic observed
    self.last_update_transfer = {}  # traffic reported so far (<= actual)
    self.force_update_transfer = set()  # IDs forced into the database
    self.users = []
    self.onlineuser_cache = lru_cache.LRUCache(timeout=60 * 30)  # online-state cache
    self.pull_ok = False  # whether user data has been pulled at least once
    self.mu_ports = {}
    self.user_pass = {}  # how many times each user's traffic update was skipped
    self.logger = logging.getLogger(__name__)
    if get_config().debug:
        self.logger.setLevel(logging.DEBUG)
        fh = logging.FileHandler('log.txt', mode='a', encoding=None, delay=False)
        formater = logging.Formatter(
            '%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s'
        )
        fh.setFormatter(formater)
        self.logger.addHandler(fh)
def pull_db_all_user(self):
    """Pull node info (traffic rate) and this node's user rows from the API.

    Side effect: sets ``self.cfg['transfer_mul']`` from the node's
    traffic_rate.

    :return: list of user rows; [] when the node or user query is empty
    """
    api = EhcoApi()
    node_id = get_config().NODE_ID
    # Fetch the node's traffic-ratio info.
    nodeinfo = api.getApi('/nodes/{}'.format(node_id))
    if not nodeinfo:
        logging.warn('没有查询到满足要求的节点,请检查自己的node_id!,或者该节点流量已经用光')
        rows = []
        return rows
    # Traffic-ratio settings.
    node_info_keys = [
        'traffic_rate',
    ]
    # ROBUSTNESS: zip instead of indexing by range(len(nodeinfo)) — the
    # old loop raised IndexError whenever the API returned more fields
    # than node_info_keys defines.
    node_info_dict = dict(zip(node_info_keys, nodeinfo))
    self.cfg['transfer_mul'] = float(node_info_dict['traffic_rate'])
    time.sleep(0.01)
    # Fetch the users assigned to this node.
    data = api.getApi('/users/nodes/{}'.format(node_id))
    api.close()
    if not data:
        rows = []
        logging.warn('没有查询到满足要求的user,请检查自己的node_id!')
        return rows
    rows = data
    return rows
def main():
    """Start the transfer main thread plus speedtest/autoexec/autoblock workers."""
    shell.check_python()
    # Pick the transfer backend from the configured API interface.
    if get_config().API_INTERFACE == 'modwebapi':
        threadMain = MainThread(web_transfer.WebTransfer)
    else:
        threadMain = MainThread(db_transfer.DbTransfer)
    threadMain.start()
    threadSpeedtest = MainThread(speedtest_thread.Speedtest)
    threadSpeedtest.start()
    threadAutoexec = MainThread(auto_thread.AutoExec)
    threadAutoexec.start()
    threadAutoblock = MainThread(auto_block.AutoBlock)
    threadAutoblock.start()
    try:
        # Block until the main thread dies of its own accord.
        while threadMain.is_alive():
            threadMain.join(10.0)
    except (KeyboardInterrupt, IOError, OSError):
        import traceback
        traceback.print_exc()
        threadMain.stop()
        if threadSpeedtest.is_alive():
            threadSpeedtest.stop()
        if threadAutoexec.is_alive():
            threadAutoexec.stop()
        if threadAutoblock.is_alive():
            threadAutoblock.stop()
def thread_db(obj):
    """Main DB sync loop: push/pull users and reconfigure servers until stopped.

    :param obj: transfer class to instantiate (e.g. DbTransfer)
    """
    import socket
    import time
    global db_instance
    timeout = 60
    socket.setdefaulttimeout(timeout)
    last_rows = []
    db_instance = obj()
    ServerPool.get_instance()
    shell.log_shadowsocks_version()
    try:
        # ROBUSTNESS: `resource` only exists on POSIX platforms; the
        # other thread_db variants in this codebase already guard this
        # import, so match them instead of crashing on e.g. Windows.
        import resource
        logging.info(
            'current process RLIMIT_NOFILE resource: soft %d hard %d' %
            resource.getrlimit(resource.RLIMIT_NOFILE))
    except Exception:
        pass
    try:
        while True:
            load_config()
            db_instance.load_cfg()
            try:
                db_instance.push_db_all_user()
                rows = db_instance.pull_db_all_user()
                if rows:
                    db_instance.pull_ok = True
                db_instance.del_server_out_of_bound_safe(last_rows, rows)
                last_rows = rows
            except Exception:
                logging.error(traceback.format_exc())
            # Sleep UPDATE_TIME seconds; stop on event or dead pool thread.
            if db_instance.event.wait(get_config().UPDATE_TIME) or \
                    not ServerPool.get_instance().thread.is_alive():
                break
    except KeyboardInterrupt:
        pass
    db_instance.del_servers()
    ServerPool.get_instance().stop()
    db_instance = None
def thread_db(obj):
    """DB sync worker loop: push/pull users until stop event or pool death.

    :param obj: transfer class to instantiate
    """
    import socket
    import time
    global db_instance
    socket.setdefaulttimeout(60)
    previous_rows = []
    db_instance = obj()
    try:
        while True:
            load_config()
            db_instance.load_cfg()
            try:
                db_instance.push_db_all_user()
                current_rows = db_instance.pull_db_all_user()
                if current_rows:
                    db_instance.pull_ok = True
                db_instance.del_server_out_of_bound_safe(
                    previous_rows, current_rows)
                previous_rows = current_rows
            except Exception:
                logging.error(traceback.format_exc())
            stop_requested = db_instance.event.wait(get_config().UPDATE_TIME)
            if stop_requested or \
                    not ServerPool.get_instance().thread.is_alive():
                break
    except KeyboardInterrupt:
        pass
    db_instance.del_servers()
    ServerPool.get_instance().stop()
    db_instance = None
def _loop(loop, dns_resolver, mgr):
    """Attach *mgr* and *dns_resolver* to *loop* and run it.

    Fatal interrupt/IO errors terminate the process; other exceptions
    are logged and swallowed.
    """
    logger = logging.getLogger(__name__)
    if get_config().debug:
        logger.setLevel(logging.DEBUG)
        fh = logging.FileHandler('log.txt', mode='a', encoding=None, delay=False)
        formater = logging.Formatter(
            '%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s'
        )
        fh.setFormatter(formater)
        logger.addHandler(fh)
    try:
        if mgr is not None:
            mgr.add_to_loop(loop)
        dns_resolver.add_to_loop(loop)
        loop.run()
    except (KeyboardInterrupt, IOError, OSError) as e:
        logger.error(e)
        import traceback
        traceback.print_exc()
        # BUG FIX: os.exit() does not exist and raised AttributeError;
        # os._exit() is the immediate-process-termination call intended.
        os._exit(0)
    except Exception as e:
        logger.error(e)
        import traceback
        traceback.print_exc()
def __init__(self):
    """Set up DNS resolver, server pools, event loop, and start the loop thread."""
    shell.check_python()
    self.config = shell.get_config(False)
    self.dns_resolver = asyncdns.DNSResolver()
    if not self.config.get('dns_ipv6', False):
        asyncdns.IPV6_CONNECTION_SUPPORT = False
    self.mgr = None  # asyncmgr.ServerMgr()
    # Port -> server maps, one per transport / address family.
    self.tcp_servers_pool = {}
    self.tcp_ipv6_servers_pool = {}
    self.udp_servers_pool = {}
    self.udp_ipv6_servers_pool = {}
    self.stat_counter = {}
    self.loop = eventloop.EventLoop()
    self.logger = logging.getLogger(__name__)
    if get_config().debug:
        self.logger.setLevel(logging.DEBUG)
        fh = logging.FileHandler('log.txt', mode='a', encoding=None, delay=False)
        formater = logging.Formatter(
            '%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s'
        )
        fh.setFormatter(formater)
        self.logger.addHandler(fh)
    # Run the event loop on a background thread.
    self.thread = MainThread((self.loop, self.dns_resolver, self.mgr))
    self.thread.start()
def __init__(self):
    """Set up resolver, pools and event loop; start the loop thread."""
    shell.check_python()
    self.config = shell.get_config(False)
    self.dns_resolver = asyncdns.DNSResolver()
    if not self.config.get('dns_ipv6', False):
        asyncdns.IPV6_CONNECTION_SUPPORT = False
    self.mgr = None  # asyncmgr.ServerMgr()
    self.eventloop_pool = {}
    self.dns_resolver_pool = {}
    # (removed: a second asyncdns.DNSResolver() was assigned to
    # self.dns_resolver here, discarding the one created above —
    # a redundant allocation with no behavioral purpose)
    self.loop = eventloop.EventLoop()
    self.thread = MainThread((self.loop, self.dns_resolver, self.mgr))
    self.thread.start()
    # Port -> server maps, one per transport / address family.
    self.tcp_servers_pool = {}
    self.tcp_ipv6_servers_pool = {}
    self.udp_servers_pool = {}
    self.udp_ipv6_servers_pool = {}
    self.stat_counter = {}
    self.uid_port_table = {}
    # Load the TrueIP configuration.
    self._true_ip_config = get_config().TRUE_IP
def pull_db_all_user(self):
    """Load all user rows from the mudb json file.

    Parses the forbidden_ip / forbidden_port / disconnect_ip fields into
    their runtime types; parse failures are logged per field and the row
    is kept.
    """
    import json
    with open(get_config().MUDB_FILE, 'rb+') as f:
        rows = json.loads(f.read().decode('utf8'))
    field_parsers = (
        ('forbidden_ip', common.IPNetwork),
        ('forbidden_port', common.PortRange),
        ('disconnect_ip', common.IPNetwork),
    )
    for row in rows:
        for field, parser in field_parsers:
            try:
                if field in row:
                    row[field] = parser(row[field])
            except Exception as e:
                logging.error(e)
    return rows
def main():
    """Entry point: configure logging, start transfer and worker threads."""
    logging.basicConfig(level=logging.INFO,
                        format="%(levelname)-s: %(message)s")
    shell.check_python()
    # Choose the transfer backend from the configured API interface.
    if get_config().API_INTERFACE == "modwebapi":
        threadMain = MainThread(web_transfer.WebTransfer)
    else:
        threadMain = MainThread(db_transfer.DbTransfer)
    threadMain.start()
    threadSpeedtest = MainThread(speedtest_thread.Speedtest)
    threadSpeedtest.start()
    threadAutoexec = MainThread(auto_thread.AutoExec)
    threadAutoexec.start()
    threadAutoblock = MainThread(auto_block.AutoBlock)
    threadAutoblock.start()
    try:
        while threadMain.is_alive():
            threadMain.join(10.0)
    except (KeyboardInterrupt, IOError, OSError):
        import traceback
        traceback.print_exc()
        threadMain.stop()
        if threadSpeedtest.is_alive():
            threadSpeedtest.stop()
        if threadAutoexec.is_alive():
            threadAutoexec.stop()
        if threadAutoblock.is_alive():
            threadAutoblock.stop()
def thread_db(obj):
    """Detect-rule DB loop: push/pull users, clear detect-change flags, repeat.

    :param obj: transfer class to instantiate
    """
    import socket
    import time
    global db_instance
    timeout = 60
    socket.setdefaulttimeout(timeout)
    last_rows = []
    db_instance = obj()
    shell.log_shadowsocks_version()
    try:
        # ROBUSTNESS: `resource` only exists on POSIX; sibling thread_db
        # variants in this codebase guard this import, so match them.
        import resource
        logging.info(
            'current process RLIMIT_NOFILE resource: soft %d hard %d' %
            resource.getrlimit(resource.RLIMIT_NOFILE))
    except Exception:
        pass
    try:
        while True:
            load_config()
            try:
                db_instance.push_db_all_user()
                rows = db_instance.pull_db_all_user()
                db_instance.del_server_out_of_bound_safe(last_rows, rows)
                # Detect-rule diffs were applied above; reset the flags.
                db_instance.detect_text_ischanged = False
                db_instance.detect_hex_ischanged = False
                last_rows = rows
            except Exception:
                logging.error(traceback.format_exc())
            if db_instance.event.wait(get_config().MYSQL_UPDATE_TIME) or \
                    not db_instance.is_all_thread_alive():
                break
    except KeyboardInterrupt:
        pass
    db_instance.del_servers()
    ServerPool.get_instance().stop()
    db_instance = None
def thread_db():
    """Entry function for the DbTransfer worker thread.

    Repeatedly pushes/pulls user rows, appends pseudo-user rows for every
    locally configured ``additional_ports`` entry, and applies the diff to
    the server pool, until the stop event is set or the pool thread dies.
    """
    logging.debug('thread_db')
    import socket
    global db_instance
    timeout = 60
    socket.setdefaulttimeout(timeout)
    last_rows = []
    db_instance = DbTransfer()
    ServerPool.get_instance()
    shell.log_shadowsocks_version()
    try:
        # `resource` is POSIX-only; best-effort informational logging.
        import resource
        logging.info(
            'current process RLIMIT_NOFILE resource: soft %d hard %d' %
            resource.getrlimit(resource.RLIMIT_NOFILE))
    except:
        pass
    rows = db_instance.pull_db_all_user()
    try:
        while True:
            load_config()
            try:
                db_instance.push_db_all_user()
                rows = db_instance.pull_db_all_user()
                if rows:
                    db_instance.pull_ok = True
                    # Inject local additional_ports as unlimited pseudo-users.
                    config = shell.get_config(False)
                    for port in config['additional_ports']:
                        val = config['additional_ports'][port]
                        val['port'] = int(port)
                        val['enable'] = 1
                        val['transfer_enable'] = 1024**7
                        val['u'] = 0
                        val['d'] = 0
                        if "password" in val:
                            val["passwd"] = val["password"]
                        rows.append(val)
                db_instance.del_server_out_of_bound_safe(last_rows, rows)
                last_rows = rows
            except Exception as e:
                trace = traceback.format_exc()
                logging.error(trace)
            # Sleep UPDATE_TIME seconds; stop on event or dead pool thread.
            if db_instance.event.wait(
                    get_config().UPDATE_TIME
            ) or not ServerPool.get_instance().thread.is_alive():
                break
    except KeyboardInterrupt as e:
        pass
    db_instance.del_servers()
    ServerPool.get_instance().stop()
    db_instance = None
def __init__(self):
    """LegendSock variant: track 'method' (plus ssr extras) only."""
    super(Dbv3Transfer, self).__init__()
    self.key_list += ['method']
    # The 'id' column is meaningless in LegendSock, hence commented out:
    # self.key_list += ['id', 'method']
    if get_config().API_INTERFACE == 'sspanelv3ssr':
        self.key_list += ['obfs', 'protocol']
    # Used later for uptime reporting.
    self.start_time = time.time()
def load_cfg(self):
    """Merge settings from the MYSQL_CONFIG json file into ``self.cfg``."""
    import json
    config_path = get_config().MYSQL_CONFIG
    cfg = None
    # BUG FIX: the file was opened in text mode ('r+') and .decode() was
    # then called on the resulting str, raising AttributeError on
    # Python 3. Open in binary mode and decode the bytes, matching the
    # other json loaders in this codebase.
    with open(config_path, 'rb') as f:
        cfg = json.loads(f.read().decode('utf8'))
    if cfg:
        self.cfg.update(cfg)
def main():
    """Pick the transfer backend from API_INTERFACE and run it on a thread."""
    shell.check_python()
    # (removed: an unreachable `if False:` branch that called
    # db_transfer.DbTransfer.thread_db() directly)
    if get_config().API_INTERFACE == 'mudbjson':
        thread = MainThread(db_transfer.MuJsonTransfer)
    elif get_config().API_INTERFACE == 'sspanelv2':
        thread = MainThread(db_transfer.DbTransfer)
    else:
        thread = MainThread(db_transfer.Dbv3Transfer)
    thread.start()
    try:
        while thread.is_alive():
            time.sleep(10)
    except (KeyboardInterrupt, IOError, OSError):
        import traceback
        traceback.print_exc()
        thread.stop()
def thread_db(obj):
    """DB sync loop with local ``additional_ports`` support.

    Repeatedly pushes/pulls user rows, appends pseudo-user rows for each
    configured additional port, and applies the diff to the server pool,
    until the stop event is set or the pool thread dies.

    :param obj: transfer class to instantiate
    """
    import socket
    import time
    global db_instance
    timeout = 60
    socket.setdefaulttimeout(timeout)
    last_rows = []
    db_instance = obj()
    ServerPool.get_instance()
    shell.log_shadowsocks_version()
    try:
        # `resource` is POSIX-only; best-effort informational logging.
        import resource
        logging.info(
            'current process RLIMIT_NOFILE resource: soft %d hard %d' %
            resource.getrlimit(resource.RLIMIT_NOFILE))
    except:
        pass
    try:
        while True:
            load_config()
            db_instance.load_cfg()
            try:
                db_instance.push_db_all_user()
                rows = db_instance.pull_db_all_user()
                if rows:
                    db_instance.pull_ok = True
                    # Inject local additional_ports as unlimited pseudo-users.
                    config = shell.get_config(False)
                    for port in config['additional_ports']:
                        val = config['additional_ports'][port]
                        val['port'] = int(port)
                        val['enable'] = 1
                        val['transfer_enable'] = 1024 ** 7
                        val['u'] = 0
                        val['d'] = 0
                        if "password" in val:
                            val["passwd"] = val["password"]
                        rows.append(val)
                db_instance.del_server_out_of_bound_safe(last_rows, rows)
                last_rows = rows
            except Exception as e:
                trace = traceback.format_exc()
                logging.error(trace)
            # Sleep UPDATE_TIME seconds; stop on event or dead pool thread.
            if db_instance.event.wait(get_config().UPDATE_TIME) or not ServerPool.get_instance().thread.is_alive():
                break
    except KeyboardInterrupt as e:
        pass
    db_instance.del_servers()
    ServerPool.get_instance().stop()
    db_instance = None
def update_all_user(self, dt_transfer):
    """Accumulate per-port u/d traffic deltas into the mudb json file.

    :param dt_transfer: mapping of port -> (upload_bytes, download_bytes)
    :return: dt_transfer unchanged
    """
    import json
    path = get_config().MUDB_FILE
    with open(path, 'rb+') as f:
        rows = json.loads(f.read().decode('utf8'))
    for row in rows:
        if "port" not in row:
            continue
        port = row["port"]
        if port in dt_transfer:
            row["u"] += dt_transfer[port][0]
            row["d"] += dt_transfer[port][1]
    if rows:
        serialized = json.dumps(rows, sort_keys=True, indent=4,
                                separators=(',', ': '))
        # Rewrite in place and truncate any leftover tail.
        with open(path, 'r+') as f:
            f.write(serialized)
            f.truncate()
    return dt_transfer
def pull_db_all_user(self):
    """Read all user rows from the mudb json file.

    Parses the forbidden_ip / forbidden_port fields into their runtime
    types (failures are logged per field); warns when the file is empty.
    """
    import json
    with open(get_config().MUDB_FILE, 'rb+') as f:
        rows = json.loads(f.read().decode('utf8'))
    field_parsers = (
        ('forbidden_ip', common.IPNetwork),
        ('forbidden_port', common.PortRange),
    )
    for row in rows:
        for field, parser in field_parsers:
            try:
                if field in row:
                    row[field] = parser(row[field])
            except Exception as e:
                logging.error(e)
    if not rows:
        logging.warn('no user in json file')
    return rows
def __init__(self):
    """Track id/method columns; add obfs/protocol for sspanelv3ssr panels."""
    super(Dbv3Transfer, self).__init__()
    self.key_list.extend(['id', 'method'])
    if get_config().API_INTERFACE == 'sspanelv3ssr':
        self.key_list.extend(['obfs', 'protocol'])
    # Used later for uptime reporting.
    self.start_time = time.time()
"""
import sys
sys.dont_write_bytecode = True
import time
from threading import Thread
import irc
import pluginmanager
import configloader
import traceback

# Full bot configuration, loaded once at import time.
config_full = configloader.get_config("config/cee.conf")


def run_command(message, plugin_manager, IRC):
    """Offer *message* to each plugin in turn; stop at the first handler.

    A plugin handles the message when its ``handle_call`` returns truthy.
    Exceptions from a plugin are printed and the next plugin is tried.
    """
    for plugin in plugin_manager.plugins:
        try:
            if plugin.plugin_object.handle_call(
                message,
                plugin_manager=plugin_manager,
                connection=IRC,
                command_prefix=config_full.get("command_prefix", "")
            ):
                break
        except Exception as e:
            print(e)
            traceback.print_exc(file=sys.stdout)