def main(as_mod=False):
    global optdict
    optlist, args = getopt.getopt(sys.argv[1:], 'bc:dhjs:')
    optdict = dict(optlist)
    if '-h' in optdict:
        print main.__doc__
        return

    cfgpathes = ['cves.ini', '/etc/cves.ini']
    if '-c' in optdict:
        cfgpathes.insert(0, optdict['-c'])
    utils.cfg = utils.getcfg(cfgpathes)
    utils.initlog(utils.cfg.get('log', 'level'), utils.cfg.get('log', 'file'))

    echo = utils.cfg.has_option('db', 'echo') and utils.cfg.getboolean('db', 'echo')
    utils.engine = sqlalchemy.create_engine(utils.cfg.get('db', 'url'), echo=echo)
    utils.sess = sqlalchemy.orm.sessionmaker(bind=utils.engine)()

    if '-j' in optdict:
        return cron_job()
    elif '-b' in optdict:
        return built_db()

    application = web_main()
    if as_mod:
        return application
    return bottle.run(app=application,
                      host=app.config['baseurl'].hostname,
                      port=app.config['baseurl'].port,
                      reloader=True)
def main():
    cfg = utils.getcfg([
        'serve.conf', '~/.webserver/serve.conf', '/etc/webserver/serve.conf'])
    utils.initlog(cfg.get('log', 'loglevel'), cfg.get('log', 'logfile'))
    addr = (cfg.get('main', 'addr'), cfg.getint('main', 'port'))

    engine = cfg.get('server', 'engine')
    if engine == 'apps':
        import apps
        ws = http.WebServer(apps.dis, cfg.get('log', 'access'))
    elif engine == 'wsgi':
        import app_webpy
        ws = http.WSGIServer(app_webpy.app.wsgifunc(), cfg.get('log', 'access'))
    else:
        raise Exception('invalid engine %s' % engine)

    server = cfg.get('server', 'server')
    if server == 'gevent':
        from gevent.server import StreamServer
        ws = StreamServer(addr, ws.handler)
    elif server == 'thread':
        ws = ThreadServer(addr, ws.handler)
    else:
        raise Exception('invalid server %s' % server)

    try:
        ws.serve_forever()
    except KeyboardInterrupt:
        pass
def __init__(self, parent=None):
    QMainWindow.__init__(self, parent)
    self.ui = thermui.Ui_Form()
    self.ui.setupUi(self)

    envconfname = 'THERM_CONFIG'
    confname = None
    if envconfname in os.environ:
        confname = os.environ[envconfname]
    if not confname:
        QMessageBox.critical(self, "therm", "No %s in environment" % envconfname)
        sys.exit(1)
    conf = conftree.ConfSimple(confname)

    utils.initlog(conf)
    self.logger = logging.getLogger()

    self.datarepo = conf.get('datarepo')
    self.scratchdir = conf.get('scratchdir')
    if not self.datarepo or not self.scratchdir:
        QMessageBox.critical(self, "therm",
                             "No 'datarepo' or 'scratchdir' in config")
        sys.exit(1)
    self.remotesettingfile = os.path.join(self.datarepo, 'consigne')
    self.myscratchact = os.path.join(self.scratchdir, 'ui')
    self.myscratchdis = os.path.join(self.scratchdir, 'ui-')
    self.ctlscratch = os.path.join(self.scratchdir, 'ctl')
    if not os.path.exists(self.ctlscratch):
        QMessageBox.critical(
            self, "therm",
            "%s not found: controller not active" % self.ctlscratch)
        sys.exit(1)

    self._refreshTimer = QtCore.QTimer()
    self._refreshTimer.timeout.connect(self._periodic)
    self._refreshTimer.start(15 * 1000.0)

    self._readCtlValues()

    cf = None
    self.localactive = False
    if os.path.exists(self.myscratchact):
        # Local control
        self.localactive = True
        cf = conftree.ConfSimple(self.myscratchact)
    elif os.path.exists(self.myscratchdis):
        cf = conftree.ConfSimple(self.myscratchdis)
    if cf:
        self.localsetting = float(cf.get("localsetting") or 20.5)
    else:
        self.localsetting = 20.5

    if self.localactive:
        self.ui.commandePB.setChecked(True)
        self.on_commandePB_toggled(True)
    else:
        self.ui.commandePB.setChecked(False)
        self.on_commandePB_toggled(False)

    self.ui.dial.setWrapping(True)
    self.lastDialValue = 0
def createRelease(trunkDir, installDir, tmpDir="/tmp"): # Get a release directory baseName = os.path.join(tmpDir, "toolkit_release.") for i in xrange(0, 10): releaseDir = baseName + str(i) if not os.path.exists(releaseDir): break if os.path.exists(releaseDir): raise Exception("unable to find release directory in tmpDir %s" % tmpDir) print "Creating release in %s" % releaseDir # install from the manifest pybuildDir = os.path.join(trunkDir, "build_system", "pybuild") sys.path.insert(0, pybuildDir) import manifest import utils utils.initlog(False) manifestFile = os.path.join(trunkDir, "release", "toolkit_release", "manifests", "toolkit_release.manifest") manifest.installFromManifest(manifestFile, installDir, releaseDir, level=0, overwrite=False, destdirExists=False, allArchitectures=False) print "Finished creating release" return releaseDir
def main():
    utils.initlog(cfg.get('log', 'loglevel'), cfg.get('log', 'logfile'))
    sender = cfg.get('email', 'mail')
    cvelist = list(core.getcves(cfg))
    logging.debug('cvelist length %d' % len(cvelist))
    db.query('BEGIN')
    if cfg.getboolean('main', 'dryrun'):
        for i in db.select(
                ['channels', 'users'],
                what='channels.id, name, email, user, severity',
                where='channels.user = users.id'):
            c = core.Chan(db, i, cfg.getboolean('main', 'dryrun'))
            body = c.geninfo(cvelist)
            print body
    else:
        with utils.with_emailconfig(cfg) as srv:
            for i in db.select(
                    ['channels', 'users'],
                    what='channels.id, name, email, user, severity',
                    where='channels.user = users.id'):
                c = core.Chan(db, i, cfg.getboolean('main', 'dryrun'))
                body = c.geninfo(cvelist)
                if body:
                    sendmail(srv, sender, i, body)
    # remove 'readed' records older than half a year
    db.delete('readed', where='uptime < $ti',
              vars={'ti': int(time.time() - 180 * 86400)})
def create_demo_release(trunk, install_dir, work_dir, version="DEV"):
    """Create the demo release in the release_dir"""
    release_dir = os.path.join(work_dir, 'install')
    print "Creating demo release in %s from installation at %s" % (release_dir, install_dir)

    # Clean the target dir to make sure starting from scratch
    if os.path.isfile(work_dir):
        os.remove(work_dir)
    if os.path.isdir(work_dir):
        print "Removing %s" % work_dir
        shutil.rmtree(work_dir)
    assert not os.path.exists(work_dir)

    # Use build system utilities to create the release.
    pybuild_dir = os.path.normpath(os.path.join(trunk, 'build_system/pybuild'))
    assert os.path.isdir(pybuild_dir)
    sys.path.insert(0, pybuild_dir)
    import manifest
    import utils
    utils.initlog(False)
    manifestFile = os.path.join(trunk, 'release/demo_release/manifests/demo_release.manifest')
    manifest.installFromManifest(manifestFile, install_dir, release_dir,
                                 level=0, overwrite=False,
                                 destdirExists=False, allArchitectures=False)

    # Create version file which will show up in the about box
    versionFile = os.path.join(work_dir, ".version")
    open(versionFile, "w").write(version)

    print "Done creating release"
def init():
    # Give ntpd a little time to adjust the date.
    time.sleep(60)

    envconfname = 'CLIMCAVE_CONFIG'
    confname = None
    if envconfname in os.environ:
        confname = os.environ[envconfname]
    if not confname:
        raise Exception("NO %s in environment" % envconfname)
    conf = conftree.ConfSimple(confname)

    utils.initlog(conf)
    global logger
    logger = logging.getLogger(__name__)

    global g_templog
    g_templog = conf.get('templog')

    pidfile = conf.get('pidfile')
    if not pidfile:
        pidfile = os.path.join(os.path.dirname(g_templog), 'climcave.pid')
    utils.pidw(pidfile)

    global g_idtempext
    g_idtempext = conf.get('idtempext')
    global g_idtempint
    g_idtempint = conf.get('idtempint')
    if not g_idtempext or not g_idtempint:
        logger.critical("No idtempext or idtempint defined in configuration")
        sys.exit(1)

    global g_using_t2ss
    g_using_t2ss = conf.get('using_t2ss')
    if g_using_t2ss:
        idctl1 = conf.get('idctl1')
        if not idctl1:
            logger.critical("No idctl1 defined in configuration")
            sys.exit(1)
        import t2ssif
        t2ssif.init(idctl1)
    else:
        gpio_pin = int(conf.get("gpio_pin"))
        if not gpio_pin:
            logger.critical("No gpio_pin defined in configuration")
            sys.exit(1)
        global pioif
        import pioif
        pioif.init(gpio_pin)
def main():
    global default_dir
    optlist, args = getopt.getopt(sys.argv[1:], 'd:h:l:')
    optdict = dict(optlist)
    if '-h' in optdict:
        print main.__doc__
        return

    utils.initlog(optdict.get('-l', 'DEBUG'))
    default_dir = path.expanduser(optdict.get('-d', os.getcwd()))

    p = pool.Pool(10)
    for i in xrange(0, 11):
        get_page_pics(p, 'http://boards.4chan.org/s/%d' % i)
    p.join()
def main(): """ durain [-c configfile] [-h] options: * -c: config file * -h: show help """ optlist, args = getopt.getopt(sys.argv[1:], "c:f:l:h") optdict = dict(optlist) if "-h" in optdict: print main.__doc__ return cfg = utils.getcfg(optdict.get("-c", ["durian.conf", "~/.durian.conf", "/etc/durian/durian.conf"])) utils.initlog(cfg.get("log", "loglevel"), cfg.get("log", "logfile")) import http if cfg.has_section("pool"): http.connector.max_addr = cfg.getint("pool", "maxaddr") addr = (cfg.get("main", "addr"), cfg.getint("main", "port")) import proxy, manager p = proxy.Proxy(accesslog=cfg.get("log", "access")) p.application = manager.setup(p) if cfg.getboolean("log", "verbose"): p.VERBOSE = True import midware if cfg.has_section("auth"): auth = midware.Auth() if cfg.has_option("auth", "userfile"): auth.loadfile(cfg.get("auth", "userfile")) elif cfg.has_option("auth", "username"): auth.add(cfg.get("auth", "username"), cfg.get("auth", "password")) auth.setup(p) if cfg.has_section("cache"): store = None if cfg.get("cache", "engine") == "memory": store = midware.MemoryCache(cfg.getint("cache", "size")) if store: midware.Cache(store).setup(p) try: StreamServer(addr, p.handler).serve_forever() except KeyboardInterrupt: pass
def main():
    global default_dir
    optlist, args = getopt.getopt(sys.argv[1:], 'd:h:l:')
    optdict = dict(optlist)
    if '-h' in optdict:
        print main.__doc__
        return

    utils.initlog(optdict.get('-l', 'DEBUG'))
    default_dir = path.expanduser(optdict.get('-d', os.getcwd()))

    p = pool.Pool(100)
    for url in args:
        for page in get_pages(url):
            get_page_pics(p, page)
    p.join()
def main():
    web.config.users = dict(cfg.items('users'))
    utils.initlog(cfg.get('log', 'loglevel'), cfg.get('log', 'logfile'))
    if web.config.rootdir:
        os.chdir(web.config.rootdir)

    optlist, args = getopt.getopt(sys.argv[1:], 'dhp:')
    optdict = dict(optlist)
    if '-h' in optdict:
        print main.__doc__
        return

    port = int(optdict.get('-p') or cfg.get('main', 'port') or 9872)
    addr = (cfg.get('main', 'addr'), cfg.getint('main', 'port'))

    from gevent.pywsgi import WSGIServer
    print 'service port :%d' % cfg.getint('main', 'port')
    WSGIServer(addr, app.wsgifunc()).serve_forever()
def __init__(self):
    self.logger = utils.initlog('Console', 'Colsole.log')
    self.logger.info('initializing pusher...')
    self.push_threshold = self.cal_init_threshold()  # initial push threshold: average of the historical top-K values
    self.total_num_predicted = self.pred_total_related()  # predicted total number of related tweets for the day
    self.windows_threshold = [0.618 for _ in range(profiles_size)]  # initial window threshold (golden-ratio point)
    self.received_count = [0 for _ in range(profiles_size)]  # related tweets received so far
    self.pushed_count = [0 for _ in range(profiles_size)]  # tweets pushed so far
    self.adjust_count = [0 for _ in range(profiles_size)]  # number of threshold adjustments
    self.left_coeff = np.arange(0.9, 0.96, 0.01)  # penalty coefficients for pushing fewer tweets than the quota
    self.logger.info('pusher initialized!')
def create_toolkit_release(trunk, install_dir, work_dir, version="DEV"):
    """Create the vision toolkit release in the release_dir"""
    release_dir = os.path.join(work_dir, 'install')
    print "Creating vision toolkit release in %s from installation at %s" % (
        release_dir, install_dir)

    # Clean the target dir to make sure starting from scratch
    if os.path.isfile(work_dir):
        os.remove(work_dir)
    if os.path.isdir(work_dir):
        print "Removing %s" % work_dir
        shutil.rmtree(work_dir)
    assert not os.path.exists(work_dir)

    # Use build system utilities to create the release.
    pybuild_dir = os.path.normpath(os.path.join(trunk, 'build_system/pybuild'))
    assert os.path.isdir(pybuild_dir)
    sys.path.insert(0, pybuild_dir)
    import manifest
    import utils
    utils.initlog(False)
    manifestFile = os.path.join(
        trunk, 'release/toolkit_release/manifests/toolkit_release.manifest')
    manifest.installFromManifest(manifestFile, install_dir, release_dir,
                                 level=0, overwrite=False,
                                 destdirExists=False, allArchitectures=False)

    # Create version file which will show up in the about box
    versionFile = os.path.join(work_dir, ".version")
    open(versionFile, "w").write(version)

    print "Done creating release"
def createRelease(trunkDir, installDir, tmpDir="/tmp"): # Get a release directory baseName = os.path.join(tmpDir, "demo_release.") for i in xrange(0, 10): releaseDir = baseName + str(i) if not os.path.exists(releaseDir): break if os.path.exists(releaseDir): raise Exception("unable to find release directory in tmpDir %s" % tmpDir) print "Creating release in %s" % releaseDir # install from the manifest pybuildDir = os.path.join(trunkDir, "build_system", "pybuild") sys.path.insert(0, pybuildDir) import manifest import utils utils.initlog(False) manifestFile = os.path.join(trunkDir, "release", "demo_release", "manifests", "demo_release.manifest") manifest.installFromManifest(manifestFile, installDir, releaseDir, level=0, overwrite=False, destdirExists=False, allArchitectures=False) print "Finished creating release" return releaseDir
def __init__(self, thread_name, event):
    super(Controller, self).__init__()
    self.name = thread_name
    self.threadEvent = event
    self.logger_info = utils.initlog('Console', 'Console.log')
    self.schedule = sched.scheduler(time.time, time.sleep)
    self.profiles_name, profiles = utils.load_profiles('profiles')
    self.related_tweets = [[] for _ in range(len(profiles))]  # the day's related tweets, stored for offline analysis
    self.pushed_tweets = [[] for _ in range(len(profiles))]
    self.pushed_tweets_ids = set([])
    self.related_tweets_hash = set([])
    self.classifier = Classifier()
    self.ranker = self.load_ranker()
    self.pusher = Pusher()
def __init__(self):
    self.logger = utils.initlog('Console', 'Colsole.log')
    self.logger.info('initializing classifier...')
    self.count = 0
    self.w2v_model = self.load_model()
    self.profile_keywords, self.required_keywords, multigram_keywords = utils.load_keywords()
    self.inverted_index = utils.build_inverted_index(self.required_keywords)
    self.ac_tries = utils.build_actries(multigram_keywords)
    detector = ldig.LangDetector('./common/model.latin')
    self.param, self.labels, self.trie = detector.load_params()
    self.stopwords = nltk.corpus.stopwords.words('english')
    self.url_pattern = re.compile(r'(https?:/*)[^ ]+|#|@')
    self.tokenizer_pattern = r'''([a-z]\.)+[a-z]?|\w+(-\w+)*'''
    self.tags = ['JJ', 'NN', 'VB']
    self.logger.info('classifier initialized!')
def __init__(self):
    self.logger = utils.initlog('Console', 'Colsole.log')
    self.logger.info('initializing classifier...')
    self.w2v_model = self.load_model()
    self.profile_keywords, self.required_keywords, multigram_keywords = utils.load_keywords()
    self.inverted_index = utils.build_inverted_index(self.required_keywords)
    self.ac_tries = utils.build_actries(multigram_keywords)
    self.keyword_map = [collections.defaultdict(float) for _ in range(225)]
    self.max_similarity = [0.0 for _ in range(225)]
    detector = ldig.LangDetector('./common/model.latin')
    self.param, self.labels, self.trie = detector.load_params()
    self.stopwords = nltk.corpus.stopwords.words('english')
    self.url_pattern = re.compile(r'(https?:/*)[^ ]+|#|@')
    self.tokenizer_pattern = r'''([a-z]\.)+[a-z]?|\w+(-\w+)*'''
    self.tags = ['JJ', 'NN', 'VB']
    self.logger.info('classifier initialized!')
def main():
    optlist, args = getopt.getopt(sys.argv[1:], 'c:hp:')
    optdict = dict(optlist)
    if '-h' in optdict:
        print main.__doc__
        return

    cfg = utils.getcfg(optdict.get('-c', [
        '/etc/lxcweb/lxcweb.conf', 'lxcweb.conf']))
    utils.initlog(cfg.get('log', 'loglevel'), cfg.get('log', 'logfile'))
    if cfg.has_section('lxc'):
        lxc.global_configfile = cfg.get('lxc', 'config')
        lxc.default_lxcpath = cfg.get('lxc', 'lxcpath')
        lxc.sudoflag = cfg.getboolean('lxc', 'sudo')
    # if web.config.rootdir: os.chdir(web.config.rootdir)

    static_path = None
    for p in ['static', '/usr/share/lxcweb/static']:
        if path.isdir(p):
            static_path = p
            break
    assert static_path is not None, "static path does not exist."

    app = web.application((
        '/static/(.*)', serve_path(static_path),

        # info actions
        '/', serve_file(path.join(static_path, 'home.html')),
        '/list.json', lxcweb.ListJson,
        '/info/(.*).json', lxcweb.InfoJson,
        '/ps/(.*).json', lxcweb.PsJson,
        '/ps/.*', serve_file(path.join(static_path, 'ps.html')),
        '/config/(.*).json', lxcweb.ConfigJson,
        '/config/.*', serve_file(path.join(static_path, 'config.html')),

        # image actions
        '/clone/(.*)/(.*)', lxcweb.Clone,
        '/create/(.*)', lxcweb.Create,
        '/destroy/(.*)', lxcweb.Destroy,
        '/export/(.*)', lxcweb.Export,
        '/import/(.*)', lxcweb.Import,

        # container actions
        '/start/(.*)', lxcweb.Start,
        '/stop/(.*)', lxcweb.Stop,
        '/shutdown/(.*)', lxcweb.Shutdown,
        '/reboot/(.*)', lxcweb.Reboot,
        '/freeze/(.*)', lxcweb.Freeze,
        '/unfreeze/(.*)', lxcweb.Unfreeze,

        # runtime actions
        '/attach/(.*)', lxcweb.Attach,
    ))

    kw = {}
    port = int(optdict.get('-p') or cfg.getint('main', 'port'))
    if cfg.has_section('ssl'):
        keyfile = cfg.get('ssl', 'key')
        certfile = cfg.get('ssl', 'cert')
        if path.exists(keyfile) and path.exists(certfile):
            kw = {'keyfile': keyfile, 'certfile': certfile}
    if cfg.has_section('users'):
        app.add_processor(Authenticate(
            cfg.get('users', 'username'), cfg.get('users', 'password')))

    from gevent.pywsgi import WSGIServer
    print 'service port :%d' % port
    WSGIServer(('', port), app.wsgifunc(), **kw).serve_forever()
pythonVersion = sys.version[:3]

# We rely on some pybuild utilities. TODO: better mechanism for putting this in sys.path
# Assumes we're in build_system/unix/standalone_apps
pybuildDir = os.path.normpath(
    os.path.join(os.path.dirname(os.path.abspath(__file__)), "../../pybuild"))
sys.path.insert(0, pybuildDir)

# import after adding pybuildDir to path
from standalone_utils import createRelease

# initialize logging for pybuild utilities
# need debug=True to be able to output from vitamind install
import utils
utils.initlog(True)


# Copied from windows script
def installVitamind(trunkDir, installDir):
    """Run the installVitamind.py script

    Installs ffmpeg, scipy and vitamind obfuscated pipeline to the install dir
    """
    print 'installVitamind()'
    saveDir = os.getcwd()
    os.chdir(os.path.join(trunkDir, 'external/src/python_modules/vitamind'))
    utils.runCommand(['python', 'install_vitamind.py', "--force", installDir])
    os.chdir(saveDir)


def patchLibs(libdir):
    start = start.replace(hour=0, minute=0, second=0, microsecond=0)
    orders = SinaWeiboInteractOrder.queryTodo(session, today)
    for order in orders:
        logging.info('process Interact order: {id}'.format(id=order.order_id))
        if SinaWeiboInteractChildOrder.search(session, order.order_id, start):
            continue
        child = SinaWeiboInteractChildOrder()
        child.parent_order_id = order.order_id
        child.create_at = datetime.now()
        child.user_id = order.user_id
        child.sina_blog_info_id = order.sina_blog_info_id
        child.purchase_number = order.purchase_number
        child.order_id = '{ss}{us}24'.format(
            ss=datetime.strftime(datetime.now(), '%Y%m%d%H%M%S'),
            us=str(Decimal.from_float(time.time()).quantize(
                Decimal('0.000')))[11:])
        child.status = ORDER_STATUS_DOING
        child.update_at = datetime.now()
        child.pay_at = order.pay_at
        child.order_amount = 0
        child.center = order.center
        child.priority = 1
        child.flag = ORDER_FLAG_DEFAULT
        session.add(child)
    session.commit()


if __name__ == "__main__":
    utils.initlog('Schedule')
    run()
        #logging.info(content)
        seg = jieba.cut(content, cut_all=cut_all)
        #for s in seg: print s
        #seg = [s for s in seg]
        #logging.info(seg)
        content = [hex(hash(s) % HASH_MODE)[2:] for s in seg]
        w_cnt = len(content)
        content = ' '.join(content)
    else:
        content = ""
    #f_out.write("%s '%d |a %s |b c_cnt:%d w_cnt:%d %s\n" %
    #            (label, id, content, c_cnt, w_cnt, s_x_cnt))
    f_out.write("%s '%d |a %s |b c_cnt:%d w_cnt:%d\n" %
                (label, id, content, c_cnt, w_cnt))
    #f_out.write("%s '%d |a %s\n" %
    #            (label, id, content))


if __name__ == '__main__':
    initlog()
    parse = argparse.ArgumentParser(add_help=False)
    parse.add_argument('-t', action='store_true', default=False,
                       help='for test file')
    parse.add_argument('srcfile')
    parse.add_argument('destfile')
    parse.add_argument('--cut_all', action='store_true', default=False,
                       help='for cut all word')
    args = parse.parse_args()
    convert(args.srcfile, args.destfile, args.t, args.cut_all)
def init():
    # Give ntpd a little time to adjust the date.
    # time.sleep(60)

    envconfname = 'THERM_CONFIG'
    confname = None
    if envconfname in os.environ:
        confname = os.environ[envconfname]
    if not confname:
        raise Exception("NO %s in environment" % envconfname)
    conf = conftree.ConfSimple(confname)

    utils.initlog(conf)
    global logger
    logger = logging.getLogger(__name__)

    # Pid here is the process id, nothing to do with the control loop
    pidfile = conf.get('pidfile')
    if not pidfile:
        pidfile = '/tmp/thermostat.pid'
    utils.pidw(pidfile)

    global g_housetempids
    g_housetempids = conf.get('housetempids').split()
    if not g_housetempids:
        logger.critical("No housetempids defined in configuration")
        sys.exit(1)

    datarepo = conf.get('datarepo')
    if not datarepo:
        logger.critical("No 'datarepo' param in configuration")
        sys.exit(1)

    global g_uisettingfile, g_tempscratch
    scratchdir = conf.get('scratchdir')
    if scratchdir:
        g_uisettingfile = os.path.join(scratchdir, 'ui')
        g_tempscratch = os.path.join(scratchdir, 'ctl')
    else:
        g_uisettingfile = None
        g_tempscratch = None

    gitif.init(datarepo)
    thermlog.init(datarepo)

    gpio_pin = int(conf.get("gpio_pin"))
    if not gpio_pin:
        logger.critical("No gpio_pin defined in configuration")
        sys.exit(1)
    pioif.init(gpio_pin)

    # Are we using a PID controller or an on/off
    global g_using_pid, g_heatingperiod
    g_using_pid = int(conf.get("using_pid") or 0)
    if g_using_pid:
        # PID coefs
        global g_kp, g_ki, g_kd
        # Using 100.0 means that we go 100% for 1 degree of
        # error. This makes sense if the heating can gain 1 degree in
        # a period (1/2 hour).
        g_kp = float(conf.get('pid_kp') or 100.0)
        # The Ki needs to be somewhat normalized against the (very
        # long) sample period. We use the normalized half kp by
        # default, meaning that the Ki contribution on a single period
        # will be half the kp one. Of course it will go up over
        # multiple periods.
        g_ki = float(conf.get('pid_ki') or g_kp / (2.0 * g_heatingperiod))
        g_kd = float(conf.get('pid_kd') or 0.0)
    else:
        global g_hysteresis
        g_hysteresis = float(conf.get('hysteresis') or 0.5)
    logging.info('end')
    session.commit()
    session.close()


def refund(session, model):
    orders = model.queryRefund(session)
    for order in orders:
        logging.info(
            'found order {order} refund status {status}, flag {flag}'.format(
                order=order.order_id, status=order.status, flag=order.flag))
        if order.order_amount and order.finished_number < order.purchase_number:
            amount = order.order_amount/order.purchase_number * \
                (order.purchase_number-order.finished_number)
            amount = int(amount * 100) / 100
            name, balance = CapitalFlow.refund(session, order,
                                               Decimal.from_float(amount))
            logging.info(
                'refund {order} {user} amount:{amount}, new balance: {balance}'
                .format(order=order.order_id, user=name,
                        amount=amount, balance=balance))
        else:
            order.flag = ORDER_FLAG_VERIFY


if __name__ == "__main__":
    utils.initlog('refund')
    run()
            version = a
        elif o == '--tmpdir':
            global tmpdir
            tmpdir = a
        elif o == '--debug':
            debug = True

    if install_dir is None or not os.path.isdir(install_dir) or target is None:
        if install_dir is not None:
            print "ERROR: install_dir %s does not exist" % install_dir
        if target is None:
            print "ERROR: target not specified"
        usage()

    # Initialize the logger
    utils.initlog(False)

    target = os.path.abspath(target)

    # Verify that all of the directories are valid
    for d in (trunk_dir, install_dir, python_dir):
        assert os.path.isdir(d)

    try:
        # Run the vitamind install script (installs scipy and ffmpeg too)
        install_vitamind(install_dir)

        # Create the people tracker release
        create_people_tracker_release(trunk_dir, install_dir, work_dir, version)

        # Copy all necessary Python files
        copy_python(python_dir, work_dir)
import sqlalchemy, sqlalchemy.orm

logger = logging.getLogger('main')
app = bottle.default_app()

optlist, args = getopt.getopt(sys.argv[1:], 'a:c:hp:')
optdict = dict(optlist)
app.config.load_config(optdict.get('-c', 'web.ini'))

engine = sqlalchemy.create_engine(app.config['db.url'])
sess = sqlalchemy.orm.sessionmaker(bind=engine)()
app.config['db.engine'] = engine
app.config['db.session'] = sess

import utils
utils.initlog(app.config.get('log.level', 'INFO'),
              app.config.get('log.logfile', ''))

session_opts = {
    'session.type': 'ext:database',
    'session.url': app.config['db.url'],
    'session.lock_dir': '/var/lock',
    'session.cookie_expires': 3600,
    'session.auto': True
}
application = SessionMiddleware(app, session_opts)


@bottle.route('/static/<filename:path>')
def _static(filename):
    return bottle.static_file(filename, root='static/')


import users, hosts, groups, records, local
    return True


def run_service(port=config.APP_PORT):
    # task data API endpoints
    task_app = tornado.web.Application(
        [
            (r'/api/monitor', MonitorOrderHandler),
            (r'/api/pvsync', PvOrderHandler),
        ],
        debug=(config.LOG_LEVEL == logging.DEBUG))

    router = tornado.routing.RuleRouter([
        (r'/api.*', task_app),
    ])

    http_server = tornado.httpserver.HTTPServer(router)
    http_server.bind(port)
    http_server.start()
    tornado.ioloop.IOLoop.current().start()


if __name__ == "__main__":
    utils.initlog('MonitorService')
    action = sys.argv[1:]
    port = action[0]
    logging.info('start monitor service {port}'.format(port=port))
    run_service(port)
    logging.info('start monitor service {port}'.format(port=11000))
    run_service(10001)