def use(broker, debug=True, **kwargs):
    """Create a trader instance for the given broker.

    :param broker: broker name, one of
        ['xq', '雪球'], ['yh_client', '银河客户端'], ['ht_client', '华泰客户端'],
        ['gj_client', '国金客户端'], ['ths', '同花顺客户端']
    :param debug: when False, raise the log level to INFO; defaults to True
    :param initial_assets: [xueqiu only] initial assets, defaults to 1,000,000
    :return: the matching trader instance
    :raises NotImplementedError: when the broker name is not recognized

    Usage::

        >>> import easytrader
        >>> user = easytrader.use('xq')
        >>> user.prepare('xq.json')
    """
    if not debug:
        log.setLevel(logging.INFO)
    # normalize once instead of calling lower() in every comparison
    name = broker.lower()
    if name in ["xq", "雪球"]:
        return XueQiuTrader(**kwargs)
    if name in ["yh_client", "银河客户端"]:
        from yh_clienttrader import YHClientTrader
        return YHClientTrader()
    if name in ["ht_client", "华泰客户端"]:
        from ht_clienttrader import HTClientTrader
        return HTClientTrader()
    if name in ["gj_client", "国金客户端"]:
        from gj_clienttrader import GJClientTrader
        return GJClientTrader()
    if name in ["ths", "同花顺客户端"]:
        from clienttrader import ClientTrader
        return ClientTrader()
    # unknown broker: tell the caller which name was rejected
    raise NotImplementedError("broker %r is not supported" % broker)
def config_debug(params):
    """Change the logger level from a one-element parameter list.

    Prints a usage hint when the argument count is wrong or the level name
    is unknown; otherwise prints the previous level and switches to the
    new one.
    """
    # parenthesized print works under both Python 2 and Python 3
    if len(params) != 1 or params[0] not in log.getLevelInfo():
        print("Use level %s" % '/'.join(log.getLevelInfo()))
        return
    print("before level is %s" % log.getLevel())
    log.setLevel(params[0])
    print("now level is %s" % params[0])
def main():
    """Parse commandline arguments and start the real work."""
    args = parse_arguments()
    # verbosity 0 -> WARNING (30), 1 -> INFO (20), 2 -> DEBUG (10)
    log.setLevel((3 - args.verbosity) * 10)
    info = HostsInfo(args.infile)
    # dispatch table instead of an if/elif chain; unknown formats are a no-op
    generators = {
        'hosts': lambda: gen_etc_hosts(info),
        'fstab': lambda: gen_fstab(info, args.group, args.user),
        'pshelper': lambda: gen_pshelper(info, args.out),
    }
    generate = generators.get(args.format)
    if generate is not None:
        generate()
def use(broker, debug=True, **kwargs):
    """Create a trader instance for the given broker.

    :param broker: broker name, one of ['ths', '同花顺'] or
        ['gj_client', '国金客户端']
    :param debug: when False, raise the log level to INFO; defaults to True
    :return: the matching trader instance
    :raises NotImplementedError: when the broker name is not recognized
    """
    if not debug:
        log.setLevel(logging.INFO)
    # normalize once instead of calling lower() in every comparison
    name = broker.lower()
    if name in ['ths', '同花顺']:
        from clienttrader import ClientTrader
        return ClientTrader()
    elif name in ['gj_client', '国金客户端']:
        from gj_clienttrader import GJClientTrader
        return GJClientTrader()
    # BUG FIX: the original `raise NotImplemented` raised a TypeError,
    # because NotImplemented is a sentinel value, not an exception class.
    raise NotImplementedError("broker %r is not supported" % broker)
def cli(opt, debug, path):
    ''' This script is built upon worker lib\n e.g.\n '''
    # stash the flag on the shared options object for downstream commands
    opt.debug = debug
    # debug flag selects the logger verbosity
    log.setLevel(logging.DEBUG if debug else logging.INFO)
    # default to the current directory when no path was supplied
    opt.path = '.' if path is None else path
def send_heartbeat(self):
    """Poll the heartbeat endpoint every 10 s to keep the token alive.

    Runs forever; while ``self.heart_active`` is False it idles in 1 s
    steps instead of hitting the server.
    """
    while True:
        if self.heart_active:
            log_level = log.level
            try:
                # silence routine heartbeat noise while probing
                log.setLevel(logging.ERROR)
                response = self.heartbeat()
                self.check_account_live(response)
            except Exception:
                # FIX: narrowed from a bare `except:` so KeyboardInterrupt /
                # SystemExit still propagate; any heartbeat failure triggers
                # a re-login, matching the original intent.
                self.autologin()
            finally:
                # FIX: restore the level on the failure path too — the
                # original left the logger stuck at ERROR after an exception
                log.setLevel(log_level)
            time.sleep(10)
        else:
            time.sleep(1)
def loglevel(irc, source, args):
    """<level>

    Sets the log level to the given <level>. <level> must be either DEBUG, INFO, WARNING, ERROR, or CRITICAL. If no log level is given, shows the current one."""
    utils.checkAuthenticated(irc, source, allowOper=False)
    # no argument: just report the current effective level
    if not args:
        irc.reply(log.getEffectiveLevel())
        return
    name = args[0].upper()
    if name not in loglevels:
        irc.reply('Error: Unknown log level "%s".' % name)
        return
    log.setLevel(loglevels[name])
    irc.reply("Done.")
def check_login(self, sleepy=30):
    """Probe the account session once and re-login if it looks broken.

    :param sleepy: seconds to sleep after the check (default 30) — this
        method is presumably driven by a polling loop; TODO confirm caller.
    """
    # mute routine heartbeat logging while probing
    log.setLevel(logging.ERROR)
    try:
        response = self.heartbeat()
        self.check_account_live(response)
    # NOTE: ConnectionError must be caught before RequestException (it is a
    # subclass) — transient network failures are deliberately ignored here
    except requests.exceptions.ConnectionError:
        pass
    except requests.exceptions.RequestException as e:
        # restore verbosity before reporting the account error and re-login
        log.setLevel(self.log_level)
        log.error("心跳线程发现账户出现错误: %s %s, 尝试重新登陆", e.__class__, e)
        self.autologin()
    finally:
        # always restore the caller's configured log level, then back off
        log.setLevel(self.log_level)
        time.sleep(sleepy)
def gen_stats(f_in, f_out, label=False, deltas=None, thresh=0, verbosity=0):
    """Parse and process tracks and calculate statistics from the data.

    Parameters
    ----------
    f_in : str or file
        Input with MTrack2 results, handed to the project's filehandle().
    f_out : str or file
        Output destination for the calculated statistics.
    label : bool
        If True, a tab-separated column-header string is built and passed
        on to _save_results().
    deltas : list of int, optional
        Stepping width(s) for the movement calculations; defaults to [1].
    thresh : int
        Threshold for rotation angles; when > 0 an extra thresholded-angle
        column is appended per stepping.
    verbosity : int
        Mapped to the log level: 0 -> 30 (WARN), 1 -> 20, 2 -> 10.

    Raises
    ------
    SystemExit
        When the header is malformed or overlapping tracks are found.
    """
    # FIX: avoid the shared mutable default argument `deltas=[1]`
    if deltas is None:
        deltas = [1]
    # default loglevel is 30 while 20 and 10 show more details
    loglevel = (3 - verbosity) * 10
    log.setLevel(loglevel)
    log.warn("Infile: %s" % f_in)
    log.debug("Outfile: %s" % f_out)
    log.info("Stepping width(s): %s" % deltas)
    log.info("Angle threshold: %s" % thresh)
    ppr = pprint.PrettyPrinter(indent=4)

    ######### tracks parsing #########
    # TODO: parsing can be done in a nicer way be reading the header lines via
    # csvreader.next(), checking for the expected values and the number of
    # tracks and then directly reading the trackpoints into a numpy ndarray...
    mtrack2_file = filehandle(f_in, "r")
    csvreader = csv.reader(mtrack2_file, delimiter="\t")
    # parse all lines into memory
    # NOTE: this is bad if the files get too large, but we haven't seen result
    # files from MTrack2 that are bigger than a couple of MB.
    data = []
    for row in csvreader:
        data.append([parse_cell(x) for x in row])
    # start parsing the header
    header = []
    header.append(data.pop(0))
    header.append(data.pop(0))
    if not header[0][0] == "Frame":
        # exit because file is broken...
        raise SystemExit("Unable to find correct header, stopping.")
    log.debug("Header:\n%s\n" % ppr.pformat(header))
    # second line is 'Tracks 1 to N', so we can read the total number there:
    trackmax = int(header[1][0].split(" ")[3])
    log.info("Total number of tracks: %s" % ppr.pformat(trackmax))
    # last N lines are the stats per track
    trackstats = []
    while True:
        # pop returns the last element if no index is given
        cur = data.pop()
        if cur[0] == "Track":
            # remove one more line (empty), then we're done
            cur = data.pop()
            break
        else:
            trackstats.append(cur)
    # as we parsed from the last element, we need to reverse the list
    trackstats.reverse()
    log.warn("Track statistics:\n%s" % ppr.pformat(trackstats))
    # create the ndarray from the remaining data while removing column 0
    # (indices), and every subsequent third column (flags)
    todelete = range(0, (trackmax + 1) * 3, 3)
    npdata = np.delete(data, todelete, axis=1)
    npdata_bool = npdata > 0

    ######### tracks processing (combining etc.) #########
    tracklen = [0] * trackmax
    t_overlap = npdata_bool[:, 0]
    for track in range(trackmax):
        tracklen[track] = sum(npdata_bool[:, track * 2])
        t_overlap = t_overlap * npdata_bool[:, track * 2]
    if trackmax > 1 and sum(t_overlap) > 0:
        raise SystemExit("*** WARNING: Found overlapping tracks! ***")
    # merge all tracks into one (x, y) sequence; rows that stay (0, 0)
    # had no data in any track and get masked out below
    t_combined = np.zeros((npdata.shape[0], 2))
    for track in range(trackmax):
        t_combined += npdata[:, track * 2 : (track + 1) * 2]
    comb_mask = np.zeros(t_combined.shape[0])
    for i, row in enumerate(t_combined):
        if (row == [0.0, 0.0]).all():
            comb_mask[i] = True
    t_combined = np.ma.compress_rows(
        np.ma.array(t_combined, mask=np.repeat(comb_mask, 2)))

    ######### calculations #########
    mov_v = {}
    mov_n = {}
    rot = {}
    rot_t = {}
    outdata = t_combined
    if label:
        label = "pos_x\tpos_y"
    for step in deltas:
        # calculate movement vectors (mov_v):
        mov_v[step] = movement_vectors(t_combined, step)
        # calculate vector normals (mov_n):
        mov_n[step] = np.zeros((mov_v[step].shape[0], 1))
        for pos in range(1, mov_n[step].shape[0]):
            mov_n[step][pos] = np.linalg.norm(mov_v[step][pos])
        # calculate rotation:
        rot[step] = calc_rotation(mov_v[step], mov_n[step], step)
        # for the movement vectors all values need to be written to the
        # output, but it is not necessary to repeat them for every stepping,
        # so they are only added for stepping '1':
        if step == 1:
            outdata = np.hstack((outdata, mov_v[1]))
            if label:
                label += "\tdelta_x\tdelta_y"
        outdata = np.hstack((outdata, mov_n[step], rot[step]))
        # threshold rotation angles:
        if thresh > 0:
            rot_t[step] = np.where(abs(rot[step]) > thresh, rot[step], 0)
            outdata = np.hstack((outdata, rot_t[step]))
        if label:
            label += "\tdistance_%s\tangle_%s" % (step, step)
            if thresh > 0:
                label += "\tthresholded_angle_%s" % step
    if label:
        log.info("label: %s" % label)
    _save_results(f_out, outdata, label)
    log.warn("Wrote results to '%s'" % filename(f_out))
#!/usr/bin/python
"""Test driver: extract 'Position' cell data via ImarisXML and save it."""

from imaris_xml import ImarisXML
from log import log

log.setLevel(20)  # 20 == logging.INFO

basedir = 'TESTDATA/spots_distances/'
infile = basedir + 'spots_red_multi_ws-all.xml'
outfile = basedir + 'result_ImarisXML_sp_red_mult_all.txt'

# test with filehandle:
# NOTE(review): this handle is never closed — assumes ImarisXML consumes it
# eagerly; confirm before wrapping it in a `with` block.
XML = ImarisXML(open(infile))
# test with string:
XML = ImarisXML(infile)
res = XML.celldata('Position')

# FIX: the output file was opened but never closed; use a context manager
with open(outfile, 'w') as output:
    output.write(str(res))
print('Written results to "%s"' % outfile)
import curses import time import random import sys import book, feed ##, pnl from ChessBoard import ChessBoard import logging from log import log log.setLevel(logging.CRITICAL) prettyBoard = False if prettyBoard: try: import pygame import inspect, os chessboardDirectory = os.path.dirname(inspect.getfile(ChessBoard)) pygame.init() pygameScreen = pygame.display.set_mode((480, 480+240),1) pygame.display.set_caption('') # load all images pieces = [{},{}] pieces[0]["r"] = pygame.image.load(chessboardDirectory + "/img/brw.png") pieces[0]["n"] = pygame.image.load(chessboardDirectory + "/img/bnw.png") pieces[0]["b"] = pygame.image.load(chessboardDirectory + "/img/bbw.png") pieces[0]["k"] = pygame.image.load(chessboardDirectory + "/img/bkw.png") pieces[0]["q"] = pygame.image.load(chessboardDirectory + "/img/bqw.png") pieces[0]["p"] = pygame.image.load(chessboardDirectory + "/img/bpw.png")
from os import environ from ansible_wrapper import PlaybookRunner from log import log from distutils.util import strtobool import logging app = Flask(__name__) # environ('GITHUB_API_KEY') GITHUB_API_KEY = environ.get('GITHUB_API_KEY') GITHUB_ORG = environ.get('GITHUB_ORG') GITHUB_TEAM = environ.get('GITHUB_TEAM') CHECK_MODE = bool(strtobool(environ.get('CHECK_MODE', 'True'))) log.setLevel(logging.DEBUG) log.debug("Starting") log.debug("Github Org: {github_org}".format(github_org=GITHUB_ORG)) log.debug("Github Team: {github_team}".format(github_team=GITHUB_TEAM)) log.debug("Check mode: {check_mode}".format(check_mode=CHECK_MODE)) @app.route('/', methods=['POST']) @app.route('/refresh-accesses', methods=['POST']) def refresh(): runner = PlaybookRunner(playbook='fetch_keys.yml', extra_vars={ "github_api_key": GITHUB_API_KEY, "github_org": GITHUB_ORG, "github_team": GITHUB_TEAM