def launch():
    """Bring the service up: initialize logging, connect the database,
    start the API, then run the difficulty test."""
    logger = logging.getLogger(__name__)
    log_init('./', 'coof')

    logger.info("Connecting to db")
    init_db()
    logger.info("Connected to db")

    logger.info("Launching api")
    api.launch()

    logger.info('Starting difficulty test')
    difficulty_test()
    logger.info("finished diff test")
def tf_dump_model(modelname, revise_outs=True, default_input_shape=(1, 224, 224, 3)):
    """Convert the named TF model to symbol/params, fuse padding ops,
    optionally revise its outputs, and dump the result.

    modelname: key into the module-level modelfile / outputs_list tables.
    revise_outs: when truthy, run _revise_output on the converted graph.
    default_input_shape: shape handed to convert_model for unspecified inputs.
    """
    utils.log_init()
    graph, weights = convert_model(
        modelfile[modelname],
        outputs=outputs_list[modelname],
        default_input_shape=default_input_shape,
    )
    graph, weights = _fuse_pad(graph, weights)
    if revise_outs:
        graph, weights = _revise_output(modelname, graph, weights)
    dump(modelname, graph, weights)
def main():
    """Build the controller from the module-level configs and run it,
    logging any failure (run() returning at all is treated as an error)."""
    log_init()
    log_info('Starting controller')
    try:
        controller = Controller(
            CONTROLLER_CONFIG,
            PH_CONFIG,
            PUMP_X_CONFIG,
            PUMP_Y_CONFIG,
            SOLUTION_TANK_CONFIG,
            SUPPLY_TANK_CONFIG,
        )
        controller.run()
        # run() is expected to block forever; returning is abnormal.
        log_err('Controller stopped running')
    except Exception as e:
        log_err(str(e))
        log_exception_trace()
def data_process(quantize_flag, input_shape, gpu_flag, num_test):
    """Profile per-operator forward time of the SSD MobileNet model.

    Loads either the quantized or the original symbol/params, runs
    `num_test` timed forward passes via get_mxnet_outs, averages each
    operator's total time (discarding the first warm-up pass when more
    than one sample exists), and writes a report sorted slowest-first
    to /home/test/<prefix>test.txt.

    Parameters:
        quantize_flag: truthy -> load the quantized model files.
        input_shape:   input shape forwarded to get_mxnet_outs.
        gpu_flag:      truthy -> run on mx.gpu(3), else CPU.
        num_test:      number of timed forward passes.
    """
    if quantize_flag:
        symbol = mx.sym.load('/home/test/tvm-cvm/data/ssd_512_mobilenet1.0_coco.all.quantize.json')
        params = nd.load('/home/test/tvm-cvm/data/ssd_512_mobilenet1.0_coco.all.quantize.params')
        pfx = 'quant_'
    else:
        symbol = mx.sym.load('/home/test/tvm-cvm/data/ssd_512_mobilenet1.0_coco.json')
        params = nd.load('/home/test/tvm-cvm/data/ssd_512_mobilenet1.0_coco.params')
        pfx = 'org_'
    if gpu_flag:
        ctx = mx.gpu(3)
        pfx += 'gpu_'
    else:
        ctx = mx.cpu()
        pfx += 'cpu_'
    params = convert_params_dtype(params, dest_dtype="float32")
    utils.log_init()
    logger = logging.getLogger('main')

    # Collect each operator's total time across all passes.
    stimes = {}
    for iter_num in range(num_test):
        outs = get_mxnet_outs(symbol, params, input_shape, ctx, gpu_flag, logger, iter_num)
        for opn, dct in outs.items():
            if opn not in stimes:
                stimes[opn] = {'sample_total': []}
            stimes[opn]['sample_total'].append(dct['total'])

    # Average per op, discarding the first (warm-up) pass when possible.
    # BUGFIX: the original unconditional samples[1:] slice raised
    # ZeroDivisionError whenever num_test == 1.
    for opn, dct in stimes.items():
        samples = dct['sample_total']
        measured = samples[1:] if len(samples) > 1 else samples
        stimes[opn]['mean_total'] = sum(measured) / len(measured)

    # Report ops sorted by mean total time, slowest first.
    arr = sorted([(stimes[opn]['mean_total'], opn) for opn in stimes], reverse=True)
    total = sum(dct['mean_total'] for dct in stimes.values())
    s = 'total forward time: %s second\n' % total
    s += '\n'
    for _, opn in arr:
        dct = stimes[opn]
        s += 'op: %s\ntotal: %s second\n' % (opn, dct['mean_total'])
        s += '---------------------------\n'
        s += '---------------------------\n'
        s += '\n'
    filename = '/home/test/' + pfx + 'test.txt'
    with open(filename, 'w') as f:
        f.write(s)
prev_path = main_path + "/.." sys.path.append (prev_path) from sipcore import * from uri import * from rand import * from Config import Config from utils import _redis_connect, log_init from client import ClientRedis from dialog import dialog_handler config_path = main_path + "/../config" config_file = "%s/config_register.py"%config_path # configuration config = Config (config_file) # logger logger = log_init (__file__, config.log_level or logging.DEBUG, config.log_local or "local3") # redis _redis = _redis_connect (config.redis_host) if not _redis: print >> sys.stderr, "Cannot connect to Redis" sys.exit (0) client = ClientRedis (_redis, debug = config.debug) def pre_handler (client, session_id, data): s_id, timestamp, addr, pkt = data session_id ['pkt']['inbound']['last_packet'] = (time.time (), pkt) def post_handler (client, session_id, data): s_id, timestamp, addr, pkt = data for _pkt in session_id ['sending_queue']:
#!/usr/bin/env python # coding=utf-8 import re import sys import os import utils import xlrd from xml.dom import minidom reload(sys) sys.setdefaultencoding('utf-8') log = utils.log_init("xls2xml", 'debug') if len(sys.argv) < 2: print "no enough input" sys.exit() else: print "python ", sys.argv[0], " string.xls " print "or" print "python ", sys.argv[0], " string.xls app/src/main/res/" #for xml in `find KidsWatch/app/ -name "strings.xml"`; do ./scripts/multilangreplace.py scripts/android.xls $xml; done xls_name = sys.argv[1] if not os.path.exists(xls_name): log.error("The xls file '%s' is not exist." % xls_name) sys.exit() sheet_index = 0 log.info("xls file {0}, sheet index {1}".format(xls_name, sheet_index)) sheet = xlrd.open_workbook(xls_name).sheet_by_index(sheet_index) langidx = {} langcode = utils.LangCodeIdx()
def load_fname(version, suffix=None, with_ext=False):
    """Build the alexnet data-file prefix ("./data/alexnet<version>[.suffix]")
    and expand it through utils.extend_fname."""
    if suffix is None:
        tail = ""
    else:
        tail = "." + suffix
    return utils.extend_fname("./data/alexnet%s%s" % (version, tail), with_ext=with_ext)

batch_size = 700
input_size = 224
inputs_ext = {'data': {'shape': (batch_size, 3, input_size, input_size)}}
inputs = [mx.sym.var(n) for n in inputs_ext]

# Spread evaluation across GPUs 1-7.
# ctx = mx.gpu(2)
ctx = [mx.gpu(int(tok)) for tok in "1,2,3,4,5,6,7".split(',') if tok.strip()]
utils.log_init()

data_iter = ds.load_imagenet_rec(batch_size, input_size)
def data_iter_func():
    """Pull one batch from the ImageNet record iterator as (data, label)."""
    batch = data_iter.next()
    return batch.data[0], batch.label[0]
data, _ = data_iter_func()

# Load the float model and set up top-1 / top-5 accuracy metrics.
sym_file, param_file = load_fname("")
net1 = utils.load_model(sym_file, param_file, inputs, ctx=ctx)
acc_top1 = mx.metric.Accuracy()
acc_top5 = mx.metric.TopKAccuracy(5)
def tf_dump_model(modelname):
    """Convert the named TF model, fuse its padding ops, and dump it.

    modelname: key into the module-level modelfile table.
    """
    utils.log_init()
    sym, params = convert_model(modelfile[modelname])
    sym, params = _fuse_pad(sym, params)
    dump(modelname, sym, params)
def test_mrt_quant(batch_size=1, iter_num=10):
    """Quantize the TREC sentiment model with MRT and evaluate it.

    Loads the float model, calibrates and quantizes it via MRT (16-bit
    input / 8-bit output precision), saves the quantized model, then
    compares float vs. quantized accuracy with utils.multi_eval_accuracy.

    NOTE(review): several branches are hard-coded dead (`if True:` /
    `if False:`), and the live dump loop below never terminates, so the
    final accuracy evaluation is unreachable as written — confirm which
    path is actually intended before relying on this function.
    """
    ctx = mx.gpu(3)
    # assumes TREC inputs are (seq_len=38, batch) — TODO confirm
    input_shape = (38, batch_size)
    inputs = [mx.sym.var('data')]
    utils.log_init()

    data_iter = ds.load_trec(batch_size)
    def data_iter_func():
        # One (data, label) batch from the TREC iterator.
        return next(data_iter)
    data, label = data_iter_func()

    # Load the float model and build its executable graph.
    sym_path, prm_path = load_fname()
    model_name, _ = path.splitext(path.basename(sym_path))
    model_dir = path.dirname(sym_path)
    model = Model.load(sym_path, prm_path)
    model = init(model, input_shape)
    net1 = model.to_graph(ctx=ctx)
    def trec(data):
        # Float-model forward pass.
        res = net1(data.as_in_context(ctx))
        return res

    qsym, qparams, inputs_qext = None, None, None
    if True:
        # Live path: MRT calibration + quantization, then save to disk.
        mrt = MRT(model)
        mrt.set_data(data)
        mrt.calibrate(ctx=ctx)
        mrt.set_input_prec(16)
        # mrt.set_fixed('data')
        mrt.set_output_prec(8)
        mrt.quantize()
        mrt.save(model_name + ".mrt.quantize", datadir=model_dir)
        # mrt.compile("trec_tfm", datadir="/data/ryt")
        # data = sim.load_real_data(data, 'data', inputs_qext)
        # np.save("/data/ryt/trec_tfm/data.npy",
        #         sim.load_real_data(data, 'data', inputs_qext).asnumpy().astype('int32'))
        # exit()
    else:
        # Dead branch: legacy calibrate/quantize API.
        # NOTE(review): would crash if enabled — inputs_qext is None here,
        # and `sym`/`params` are undefined in this scope.
        inputs_qext['data']['data'] = data
        th_dict = calib.sym_calibrate(sym, params, inputs_qext, ctx=ctx)
        qsym, qparams, _ = calib.pure_int8_quantize(sym, params, inputs_qext, th_dict)

    # Executable graph for the quantized model.
    net2 = mrt.current_model.to_graph(ctx=ctx)
    # net2 = gluon.nn.SymbolBlock(qsym, inputs)
    # utils.load_parameters(net2, qparams, ctx=ctx)
    inputs_qext = mrt.get_inputs_ext()
    def quantize(data):
        # Quantized-model forward pass on integer-scaled input.
        data = sim.load_real_data(data, 'data', inputs_qext)
        res = net2(data.as_in_context(ctx))
        return res

    if False:
        # Dead branch: single-sample standard dump for cross-checking.
        # NOTE(review): `qinputs_ext` is undefined — likely a typo for
        # inputs_qext; would raise NameError if enabled.
        inputs_qext['data']['shape'] = (38, 1)
        data = data[:, 0].reshape(38, 1)
        _mrt.std_dump(qsym, qparams, qinputs_ext, data, "trec", batch=True,
                      data_dtype="int32", max_num=1000,
                      dump_ops=["sentimentnet0_embedding0_fwd"])
        opg.dump_file("take", [
            "/data/std_out/trec/sentimentnet0_embedding0_fwd_0.mrt.dump.in.npy",
            "/data/std_out/trec/sentimentnet0_embedding0_fwd_1.mrt.dump.in.npy"
        ], [
            "/data/std_out/trec/sentimentnet0_embedding0_fwd_0.mrt.dump.out.npy"
        ],
            "/data/std_out/trec/sentimentnet0_embedding0_fwd.attr")
    if True:
        # Live path: dump per-op outputs batch after batch.
        # NOTE(review): infinite loop — the exit() below and the accuracy
        # evaluation after it can never run.
        while True:
            data, _ = next(data_iter)
            inputs_qext = mrt.get_inputs_ext()
            data = sim.load_real_data(data, 'data', inputs_qext)
            inputs_qext['data']['data'] = data
            spass.sym_dump_ops(mrt.current_model.symbol,
                               mrt.current_model.params,
                               inputs_qext, ctx=mx.gpu(3))
        exit()

    # Compare float (trec) vs quantized (quantize) accuracy.
    utils.multi_eval_accuracy(trec, data_iter_func, quantize,
                              iter_num=iter_num)
refCol = referLangIdx.get(lang) if refCol is not None: value = stripCellValue(referSh.cell(refRow, refCol).value) if refCol is None or value is None or len(value) == 0: value = stripCellValue(baseSh.cell(row, col).value) else: value = stripCellValue(baseSh.cell(row, col).value) outSh.write(cursor, col, value) wbk.save(target) if __name__ == '__main__': if len(sys.argv) < 2: usage_help("please give the action") sys.exit() action = sys.argv[1] log = log_init("msxls", 'debug') if action == "xmltoxls": androidxml_to_xls() elif action == "merge": merge(log) elif action == "sort": sort_sheet(sys.argv[2]) elif action == "split": split(log) elif action == "update": update(log) else: usage_help("invalid action '{0}'".format(action))