def main():
    """Entry point of the APNs agent: init logging, bind the RPC server, run the loop."""
    logger.init_log('apns_agent', 'apns_agent')
    setproctitle('apns:agent')
    server = IncomingServer(handle_req, log=False)
    # Listen address comes from the '_default' section of the apns config;
    # parse_ip_port(...)[-1] yields the port part.
    listen_port = parse_ip_port(apns_conf.get('_default', 'host'))[-1]
    server.bind(listen_port)
    server.start(0)
    IOLoop.instance().start()
def run_process(redis_url, process_ratio, run_or_not):
    """Load config (via load_conf.py) and start the MQ worker processes.

    :param redis_url: redis URL to watch for messages
    :param process_ratio: number of worker processes to fork
    :param run_or_not: whether this worker should run at all (0/1)
    :return: None
    """
    # FIX: validate redis_url BEFORE resolving it.  Originally the check ran
    # after resolve_redis_url(redis_url, True), which would already have
    # failed on a malformed value, making the guard unreachable.  Logging is
    # initialised with a generic tag on this early error path because the
    # channel name is not known yet.
    if not (redis_url and isinstance(redis_url, str) and '|' not in redis_url):
        logger.init_log('mq_invalid', 'mq_invalid')
        logger.fatal('redis_url invalid: %s' % redis_url)
        return
    host, port, db, pwd, channel = resolve_redis_url(redis_url, True)
    logger_flag = "mq_%s" % channel
    logger.init_log(logger_flag, logger_flag)
    if not isinstance(process_ratio, int):
        logger.fatal('process_ratio invalid: %s' % type(process_ratio))
        return
    if not isinstance(run_or_not, int):
        logger.fatal('run param invalid: %s' % type(run_or_not))
        return
    if not run_or_not:
        logger.debug('not run {0} via config'.format(redis_url))
        return
    setproctitle('mq:host:{0}'.format(channel))
    fork_processes(process_ratio)
    options.logging = 'error'
    setproctitle('mq:{0}:{1}'.format(channel, db))
    loop = IOLoop.instance()
    # Shared singletons are installed on __builtins__ so handler modules can
    # reach them without explicit imports (existing project convention).
    setattr(__builtins__, 'handler_map', {})
    setattr(__builtins__, 'watch_redis',
            AsyncRedis(None, False, host, port, db, pwd))
    setattr(__builtins__, 'watch_channel', channel)
    setattr(__builtins__, 'dev_filter',
            AsyncRedis(redis_conf.get('dev_filter', 'url')))
    setattr(__builtins__, 'level_client',
            GeneralInternalClient(
                parse_ip_port(leveldb_conf.get('default', 'host'))))
    setattr(__builtins__, 'account_cache',
            AsyncRedis(redis_conf.get('oauth', 'url')))
    logger.warn('run_process channel:{0}, db:{1}, redis_url={2}'.format(
        channel, db, redis_url))
    # Import the channel-specific handler module and register it.
    mod_name = '.'.join((channel, 'uni_handler', 'handle_msg'))
    handler = import_object(mod_name)
    logger.warn('import mod:{0}'.format(mod_name))
    if handler:
        logger.warn('found handler={0}'.format(mod_name))
        handler_map.update({channel: handler})
    loop.add_callback(coroutine_run)
    loop.start()
def main():
    """Entry point of the leveldb core service."""
    logger.init_log('lvl_core', 'lvl_core')
    setproctitle('lvl:core')
    # NOTE(review): the inner .get('default', 'host') reads like a config
    # (section, option) lookup on the object returned for the 'leveldb.ini'
    # key — presumably conf_load yields config objects, not plain dicts;
    # confirm against conf_load's implementation.
    conf = dict(conf_load('leveldb.ini')).get('leveldb.ini')
    listen_port = parse_ip_port(conf.get('default', 'host'))[-1]
    IncomingServer(handle_req, log=False).listen(listen_port)
    IOLoop.instance().start()
def __init__(self, service_type, service_version, is_sm=False,
             db_update_dir_path=None, use_mysqldb=False, use_orm=False,
             use_redis=False):
    """Bootstrap the service: logging, exit hook, CLI options and backends.

    :param service_type: service name, used for log files and --service_type
    :param service_version: version string exposed via --service_version
    :param is_sm: whether this process is the service manager
    :param db_update_dir_path: dir with db update scripts, None to skip
    :param use_mysqldb: whether to start the mysqldb client lib
    :param use_orm: whether to start the ORM db lib
    :param use_redis: whether to start the redis cache client
    """
    self.service_type = service_type
    self.service_version = service_version
    self.thread_ls = []
    self.is_sm = is_sm
    self.db_update_dir_path = db_update_dir_path
    self.use_mysqldb = use_mysqldb
    self.use_orm = use_orm
    self.use_redis = use_redis
    self.adv = None
    self.sm_rpc = None
    logger.init_log(self.service_type, self.service_type)
    # Ensure stop_service runs on process exit.
    ExitHandler().add_exit_handler(self.stop_service)
    arg_parser = ArgumentParser()
    p = arg_parser.get_argparser()
    p.add_argument('--is_https', default=False, type=parser_boolean,
                   help="Is use http ssl connection")
    p.add_argument('--http_port', default=0, type=int,
                   help="The port of the http app listen")
    p.add_argument('--tcp_port', default=0, type=int,
                   help="The port of of the tcp rpc app listen")
    p.add_argument('--service_type', default=service_type, type=str,
                   help="The type of the service")
    # FIX: default was mistakenly `use_redis` (copy/paste error); the version
    # flag must default to the service_version passed to the constructor.
    p.add_argument('--service_version', default=service_version, type=str,
                   help="The version of the service")
    # NOTE(review): type=bool makes argparse treat any non-empty string as
    # True; parser_boolean (used by the flags below) is probably intended —
    # confirm with callers before changing, left as-is here.
    p.add_argument('--is_sm', default=is_sm, type=bool,
                   help="Whether it is the service manager")
    p.add_argument('--db_update_dir_path', default=db_update_dir_path,
                   type=str, help="The dir for db update use")
    p.add_argument('--use_mysqldb', default=use_mysqldb, type=parser_boolean,
                   help="Whether to use the mysqldb lib")
    p.add_argument('--use_orm', default=use_orm, type=parser_boolean,
                   help="Whether to use the orm db lib")
    p.add_argument('--use_redis', default=use_redis, type=parser_boolean,
                   help="Whether to use the redis cache")
    p.add_argument('--logger_err_2_mail', default=False, type=bool,
                   help="whether logger 2 mail when error")
    p.add_argument('--logger_err_2_sms', default=False, type=bool,
                   help="whether logger 2 sms when error")
    # Subclass hook to register extra options.
    self.add_cmd_opts(p)
    IpFinder().is_extranet = arg_parser.args.is_extranet
    if self.db_update_dir_path:
        self._db_update()
    if self.use_mysqldb:
        self._start_mysqldb()
    if self.use_orm:
        self._start_orm()
    if self.use_redis:
        self._start_redis_client()
    args = arg_parser.args
    arg_parser.will_change = False
    self.prepare(args)
    self.init(args)
    self._set_logger(args)
    logger.set_logger_level(args.logger_level)
def __init__(self, service_type, service_version, is_sm=False,
             db_update_dir_path=None, use_mysqldb=False, use_orm=False,
             use_redis=False):
    """Bootstrap the service: logging, exit hook, CLI options and backends.

    :param service_type: service name, used for log files and --service_type
    :param service_version: version string exposed via --service_version
    :param is_sm: whether this process is the service manager
    :param db_update_dir_path: dir with db update scripts, None to skip
    :param use_mysqldb: whether to start the mysqldb client lib
    :param use_orm: whether to use the ORM db lib (stored but not started here)
    :param use_redis: whether to start the redis cache client
    """
    self.service_type = service_type
    self.service_version = service_version
    self.thread_ls = []
    self.is_sm = is_sm
    self.db_update_dir_path = db_update_dir_path
    self.use_mysqldb = use_mysqldb
    self.use_orm = use_orm
    self.use_redis = use_redis
    self.adv = None
    self.sm_rpc = None
    logger.init_log(self.service_type, self.service_type)
    # Ensure stop_service runs on process exit.
    ExitHandler().add_exit_handler(self.stop_service)
    arg_parser = ArgumentParser()
    p = arg_parser.get_argparser()
    # Subclass hook to register extra options (runs before the builtin ones
    # in this variant).
    self.add_cmd_opts(p)
    p.add_argument('--service_type', default=service_type, type=str,
                   help="The type of the service")
    # FIX: default was mistakenly `use_redis` (copy/paste error); the version
    # flag must default to the service_version passed to the constructor.
    p.add_argument('--service_version', default=service_version, type=str,
                   help="The version of the service")
    # NOTE(review): type=bool makes argparse treat any non-empty string as
    # True; parser_boolean is probably intended — confirm before changing.
    p.add_argument('--is_sm', default=is_sm, type=bool,
                   help="Whether it is the service manager")
    p.add_argument('--db_update_dir_path', default=db_update_dir_path,
                   type=str, help="The dir for db update use")
    p.add_argument('--use_mysqldb', default=use_mysqldb, type=parser_boolean,
                   help="Whether to use the mysqldb lib")
    p.add_argument('--use_orm', default=use_orm, type=parser_boolean,
                   help="Whether to use the orm db lib")
    p.add_argument('--use_redis', default=use_redis, type=parser_boolean,
                   help="Whether to use the redis cache")
    p.add_argument('--logger_mask', default='0000', type=str,
                   help="The logger mask em es fm fs")
    IpFinder().is_extranet = arg_parser.args.is_extranet
    if not self.is_sm:
        # Pull service-manager-provided parameters into the parser.
        SMParamParser(service_type=self.service_type,
                      sm_rpc=ParamCacher().sm_rpc,
                      arg_parser=p,
                      rdm_port_fun=get_random_port)
    # NOTE(review): the collapsed original is ambiguous about whether this
    # hook sits inside the `if not self.is_sm` branch; it is kept
    # unconditional here — confirm against the original layout.
    self.add_cmd_opts_after_sm(p)
    if self.db_update_dir_path:
        self._db_update()
    if self.use_mysqldb:
        self._start_mysqldb()
    if self.use_redis:
        self._start_redis_client()
    args = arg_parser.args
    arg_parser.will_change = False
    self.init(args)
    logger.set_logger_level(args.logger_level)
import gevent
from gevent import threading

# Strip the extra argv entries injected by the IDE's utrunner so argument
# parsing in the code under test is not confused by them.
if len(sys.argv) >= 3:
    if "utrunner.py" in sys.argv[0]:
        sys.argv = sys.argv[1:-1]

# Register the project directories on sys.path so the imports below resolve
# regardless of where the test runner is started from.
cur_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
site.addsitedir(cur_path)
site.addsitedir(os.path.join(cur_path, "utest_workspace"))
site.addsitedir(os.path.join(os.path.dirname(cur_path), "workspace"))
site.addsitedir(os.path.join(os.path.dirname(cur_path), "workspace", "common_server"))

from utils import error_code, logger, crypto
# NOTE(review): logging is initialised before the remaining imports —
# presumably some of them log at import time; confirm before reordering.
logger.init_log("unittest", "unittest")
from utils.network.tcp import TcpRpcClient, TcpRpcServer, TcpRpcHandler
from utils.network.http import HttpRpcServer, HttpRpcHandler, HttpRpcClient
from utils.network.udp import UdpServer, UdpClient
from utils.network.xmpp import XMPPClient
from utils.opcode import xmpp
from utils.wapper.xmpp import xmpp_handler
from utils.wapper.web import web_adaptor
from utils.route import Route, route
from utils.meta.singleton import Singleton
from utils.data.cache import redis_client
from utils.crypto.sign import sign, checksign, Signer
from utils.service_control import setting as service_control_setting
from utils.service_control.cacher import ServiceMgrCacher, ParamCacher
from utils.service_control.setting import RT_HASH_RING, RT_CPU_USAGE_RDM
def __init__(self, service_type, service_version, is_sm=False,
             db_update_dir_path=None, use_mysqldb=False, use_orm=False,
             use_redis=False):
    """Bootstrap the service: proc title, logging, exit hook, CLI, backends.

    :param service_type: service name, used for proc title, logs and CLI
    :param service_version: version string exposed via --service_version
    :param is_sm: whether this process is the service manager
    :param db_update_dir_path: dir with db update scripts, None to skip
    :param use_mysqldb: whether to start the mysqldb client lib
    :param use_orm: whether to start the ORM db lib
    :param use_redis: whether to start the redis cache client
    """
    self.service_type = service_type
    self.service_version = service_version
    self.thread_ls = []
    self.is_sm = is_sm
    self.db_update_dir_path = db_update_dir_path
    self.use_mysqldb = use_mysqldb
    self.use_orm = use_orm
    self.use_redis = use_redis
    self.adv = None
    self.sm_rpc = None
    # This variant also names the process after the service type.
    setproctitle(self.service_type)
    logger.init_log(self.service_type, self.service_type)
    ExitHandler().add_exit_handler(self.stop_service)
    arg_parser = ArgumentParser()
    p = arg_parser.get_argparser()
    p.add_argument('--is_https', default=False, type=parser_boolean,
                   help="Is use http ssl connection")
    p.add_argument('--http_port', default=0, type=int,
                   help="The port of the http app listen")
    p.add_argument('--tcp_port', default=0, type=int,
                   help="The port of of the tcp rpc app listen")
    p.add_argument('--service_type', default=service_type, type=str,
                   help="The type of the service")
    # FIX: default was mistakenly `use_redis` (copy/paste error); the version
    # flag must default to the service_version passed to the constructor.
    p.add_argument('--service_version', default=service_version, type=str,
                   help="The version of the service")
    # NOTE(review): type=bool makes argparse treat any non-empty string as
    # True; parser_boolean is probably intended — confirm before changing.
    p.add_argument('--is_sm', default=is_sm, type=bool,
                   help="Whether it is the service manager")
    p.add_argument('--db_update_dir_path', default=db_update_dir_path,
                   type=str, help="The dir for db update use")
    p.add_argument('--use_mysqldb', default=use_mysqldb, type=parser_boolean,
                   help="Whether to use the mysqldb lib")
    p.add_argument('--use_orm', default=use_orm, type=parser_boolean,
                   help="Whether to use the orm db lib")
    p.add_argument('--use_redis', default=use_redis, type=parser_boolean,
                   help="Whether to use the redis cache")
    p.add_argument('--logger_err_2_mail', default=False, type=bool,
                   help="whether logger 2 mail when error")
    p.add_argument('--logger_err_2_sms', default=False, type=bool,
                   help="whether logger 2 sms when error")
    # Subclass hook to register extra options.
    self.add_cmd_opts(p)
    IpFinder().is_extranet = arg_parser.args.is_extranet
    if self.db_update_dir_path:
        self._db_update()
    if self.use_mysqldb:
        self._start_mysqldb()
    if self.use_orm:
        self._start_orm()
    if self.use_redis:
        self._start_redis_client()
    args = arg_parser.args
    arg_parser.will_change = False
    self.prepare(args)
    self.init(args)
    self._set_logger(args)
    logger.set_logger_level(args.logger_level)
import logging
import os
import sys
_cur_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append("%s/../" % _cur_dir)

import time
import unittest

from utils.data_io import get_data
from feature.feature_generator import FeatureGenerator
from model.word_vec.fasttext import fasttext_training
from model.word_vec.fasttext import load_fasttext_model
from utils.logger import init_log
init_log()

# NOTE(review): "fastttext" (triple t) looks like a typo, but the name may be
# referenced elsewhere, so it is intentionally kept unchanged.
fastttext_data_dir = "test/data/jinyong"
fasttext_model_path = "test/output/fasttext_model"


def get_similar(text, model):
    """Print the words most similar to *text* under *model*.

    :param text: query word; byte strings are decoded as gb18030
    :param model: a trained word-vector model exposing model.wv.most_similar
    """
    # FIX: idiomatic isinstance check instead of comparing type objects.
    # Byte strings are normalized to unicode before querying the model.
    if not isinstance(text, type(u"")):
        text = text.decode("gb18030", "ignore")
    print("text: %s" % text)
    res_list = model.wv.most_similar(text)
    print(u"原词:%s " % text)
    print(u"相似词\t相似度")
    for key, value in res_list:
        print("%s\t%f" % (key, value))
from gevent import threading # remove unused utruner args if len(sys.argv) >= 3: if "utrunner.py" in sys.argv[0]: sys.argv = sys.argv[1:-1] cur_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) site.addsitedir(cur_path) site.addsitedir(os.path.join(cur_path, "utest_common_server")) site.addsitedir(os.path.join(os.path.dirname(cur_path), "workspace")) site.addsitedir( os.path.join(os.path.dirname(cur_path), "workspace", "common_server")) from utils import error_code, logger, crypto logger.init_log("unittest", "unittest") from utils.network.tcp import TcpRpcClient, TcpRpcServer, TcpRpcHandler from utils.network.http import HttpRpcServer, HttpRpcHandler, HttpRpcClient from utils.wapper.web import web_adaptor from utils.route import Route, route from utils.meta.singleton import Singleton from utils.crypto.sign import sign, checksign, Signer from utils.service_control import setting as service_control_setting from utils.service_control.cacher import ServiceMgrCacher, ParamCacher from utils.service_control.setting import ST_MMM, ST_MMM_DA from utils.setting import enum from lib.common_fun import random_str from lib.setting import SYNC_WAIT_TIME
# NOTE(review): os.makedirs raises if output_dir already exists — presumably
# an earlier step guarantees a fresh dir; confirm.
os.makedirs(output_dir)
# Snapshot the effective config and the raw yaml next to the run outputs.
save_yaml(os.path.join(output_dir, 'config.yaml'))
shutil.copy(yaml_file_path, os.path.join(output_dir, 'neat_config.yaml'))

# Configure the IPU session.
gcop.safe_mode_on()
gcop.set_options(cfg.SESSION)
gcop.set_seed(cfg.TRAIN.SEED)
gcop.set_memory_proportion(cfg.TRAIN.AVAILABLE_MEMORY_PROPORTION)
if cfg.MODEL.LOAD_STRICT:
    gcop.set_load_strict()

# Initialise logging (optionally resuming, with tensorboard / wandb sinks).
logger.init_log(output_dir,
                log_name=cfg.task_name,
                resume=cfg.TRAIN.RESUME,
                tb_on=cfg.TRAIN.TB_ON,
                wandb_on=cfg.TRAIN.WANDB_ON)
logger.log_str('output dir:', output_dir)

# Build the training data pipeline; train_size counts samples per epoch.
train_dataloader = get_data_loader(cfg)
iters_per_epoch = len(train_dataloader)
train_dataloader_iter = iter(train_dataloader)
train_size = iters_per_epoch * cfg.TRAIN.BATCH_SIZE
logger.log_str('{:d} roidb entries'.format(train_size))

# Fixed single-image NCHW input shape derived from the configured input size.
IM_WIDTH, IM_HEIGHT = cfg.INPUT_SIZE
input_im_shape = [1, 3, IM_HEIGHT, IM_WIDTH]

# Load initialisers (pretrained weights path from config).
init_weights_path = cfg.INIT_WEIGHTS_PATH
def set_default_args(self):
    """Register default command-line flags.

    These defaults apply when the user does not define a value in the conf
    file, and are overridden by any value the user has defined there.
    """
    logger.init_log("./logs/paddle_frame")
    # --- input data ---
    flags.DEFINE_string('dataset_dir', './train_data/', 'set default dataset_dir')
    flags.DEFINE_string('file_list', None, 'set default file_list')
    flags.DEFINE_string('file_pattern', 'part-', 'set sample filename pattern')
    flags.DEFINE_integer('batch_size', 1024, 'set default batch_size')
    flags.DEFINE_string('data_reader', 'pyreader', 'set default data_reader')
    flags.DEFINE_string('dataset_split_name', 'train', 'set default dataset_split_name')
    flags.DEFINE_string('dataset_mode', 'QueueDataset', 'set default dataset_mode')
    flags.DEFINE_integer('sample_seed', 1234, 'set default seed')
    flags.DEFINE_integer('num_gpus', 0, 'set default gpu index')
    flags.DEFINE_boolean('debug_mode', False, 'set default debug model')
    flags.DEFINE_string('platform', 'local-cpu', 'set default platform.')
    # --- model initialisation ---
    flags.DEFINE_string('init_pretrain_model', None, 'set init pretrain model with same network')
    flags.DEFINE_string('init_train_params', None, 'set init model params for train, e.g. glue word2vec.')
    # --- training schedule ---
    flags.DEFINE_integer('num_epochs_input', 2, 'set default epochs')
    flags.DEFINE_integer('num_samples', 100, 'set default samples num')
    flags.DEFINE_integer('max_number_of_steps', None, 'set default max step num')
    flags.DEFINE_float('base_lr', 0.01, 'set default learning rate')
    # --- reader tuning ---
    flags.DEFINE_integer('py_reader_capacity', 128, 'set default py_reader capacity.')
    flags.DEFINE_boolean('py_reader_use_double_buffer', True, 'set_default py_reader use_double_buffer')
    flags.DEFINE_boolean('py_reader_iterable', True, 'set_default py_reader iterable')
    flags.DEFINE_integer('batch_shuffle_size', 0, 'batch data shuffle size, 0 not shuffle')
    flags.DEFINE_integer('num_preprocessing_threads', 1, 'num_preprocessing_threads for sample read')
    flags.DEFINE_integer('save_model_steps', 100, 'save model in steps')
    flags.DEFINE_boolean('reader_batch', False, 'read batch from user dataset')
    flags.DEFINE_boolean('drop_last_batch', True, 'drop last batch')
    # --- mixed precision (fp16) loss scaling ---
    flags.DEFINE_boolean('use_fp16', False, 'fp16')
    flags.DEFINE_float('init_loss_scaling', 1.0, 'init_loss_scaling')
    flags.DEFINE_integer('incr_every_n_steps', 1000, 'incr_every_n_steps')
    flags.DEFINE_integer('decr_every_n_nan_or_inf', 2, 'fp16 decr_every_n_nan_or_inf')
    flags.DEFINE_float('incr_ratio', 2.0, 'fp16 incr_ratio')
    flags.DEFINE_float('decr_ratio', 0.8, 'fp16 decr_ratio')
    flags.DEFINE_boolean('use_dynamic_loss_scaling', True, 'dynamic_loss_scaling')
(gateway_host + "/user/authorization", "GET"),
))
def test_disable_url(url, method):
    """URLs that must be refused (401) when accessed without authorization."""
    res = http_requester(url, json_loads=False, method=method)
    print(res)
    assert res.status == 401


@pytest.mark.parametrize(('url', "method"), (
    (gateway_host + "/user/phone", "GET"),
    (gateway_host + "/user/user_name", "GET"),
    (gateway_host + "/sms_code", "POST"),
    (gateway_host + "/user", "POST"),
    (gateway_host + "/user/authorization", "POST"),
))
def test_auth_url(url, method):
    """URLs reachable with authorization; 200 or 404 are both acceptable."""
    res = http_requester(url, json_loads=False, method=method)
    print(res)
    assert res.status in (200, 404)


if __name__ == '__main__':
    logger.init_log("test_gateway")
    pytest.main()
args = parse_args()
change_cfg_by_yaml_file(args.yaml)

# Create the output dir (single level; os.mkdir requires the parent to exist).
output_dir = cfg.output_dir
if not os.path.exists(output_dir):
    os.mkdir(output_dir)

# Build the inference log suffix.  NOTE(review): the conditional is redundant
# (both branches yield args.model_name or ''); kept as-is.
log_prefix = args.model_name if args.model_name != '' else ''
log_prefix = '_inference' + log_prefix
logger.init_log(output_dir,
                log_name=cfg.task_name,
                post_fix=log_prefix,
                resume=False,
                tb_on=False,
                wandb_on=False)
logger.log_str('output dir:', output_dir)

# Select the evaluation dataset from config.
if cfg.TEST.DATASET == 'voc':
    imdb = get_imdb('voc_2007_test')
elif cfg.TEST.DATASET == 'coco':
    imdb = get_imdb('coco_2017_val')
else:
    raise ValueError("Unknown dataset!")
imdb.competition_mode(False)
val_size = imdb.num_images
logger.log_str('{:d} roidb entries'.format(val_size))
import tornado.options
from tornado import httpserver
from tornado.ioloop import IOLoop
from tornado.options import define, options
from tornado.web import Application

from logic.write import WriteHandler
from logic.down_multi import MultiDownHandler
from logic.read import ReadHandler
from logic.delete import DeleteFileHandler
from setting import SERVICE_TYPE
from utils import logger

define("port", help="run on the given port", type=int, default=8106)
setproctitle(SERVICE_TYPE)
logger.init_log(SERVICE_TYPE, SERVICE_TYPE)


def main():
    tornado.options.options.logging = 'debug'
    tornado.options.parse_command_line()
    # After the master/slave split, reads go to the slave mysql.
    settings = {
        # Hard cap on request body size: 100K.
        'max_buffer_size': 102400,
    }
    app = Application([
        ('/up', WriteHandler),
        ('/down', ReadHandler),
def watch_dispatch(): #监听更多通道,分散压力 watch_channels = [str(x) for x in xrange(1, 11)] #兼容 watch_channels.append('dispatch') while 1: try: _, unpacked_msg = dispatch_redis.blpop(watch_channels, 0) unpacked_msg = msgpack.unpackb(unpacked_msg, encoding='utf-8') dst_ch = unpacked_msg.get('c') redis_list = task_ch_redis.get(dst_ch) if not redis_list: logger.warn('mis_spell channel: %s' % dst_ch) continue randin = randint(0, len(redis_list) - 1) selected_redis = redis_list[randin] logger.warn('selected_redis:%s,dst_ch:%s, unpacked_msg:%s' % (selected_redis, dst_ch, unpacked_msg)) selected_redis.rpush(dst_ch, unpacked_msg.get('p')) except Exception, ex: logger.error('blpop fail: {0}'.format(ex), exc_info=True) if __name__ == '__main__': logger.init_log('mqd_job', 'mqd_job') fork_processes(10) setproctitle('mqd_job') watch_dispatch()
Author: zhanghao55([email protected])
Date: 2019/11/21 20:37:58
"""
import logging
import os
import re
import sys

_cur_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append("%s/../../" % _cur_dir)

from model.lr_model_impl import BaseLRModel
from preprocess import ProcessFilePath
from feature.feature_generator import FeatureGenerator
from utils.logger import init_log
init_log("./log/lr_model.log")

import config


class LRModelDemo(BaseLRModel):
    """Demo LR classification model built on BaseLRModel."""

    def __init__(self, mid_data_dir, model_dir, output_dir):
        """ """
        super(LRModelDemo, self).__init__(model_dir, output_dir)
        # Paths for intermediate preprocessing artifacts.
        self.mid_data_paths = ProcessFilePath(output_dir=mid_data_dir)
        self.feature_generator = FeatureGenerator(
            seg_method=config.seg_method,