def makeSoInDocker(options):
    """Run every makeso build script found under each project's src dir.

    Returns 1 on success (also when no script exists), 0 on the first
    script that exits non-zero.
    """
    found = 0
    for proj in options.projectlist:
        srcPath = fsutils.appendPath(proj['path'], 'src')
        actlog.log('makeSoInDocker', srcPath)
        # Windows projects ship makeso.cmd, everything else makeso.sh
        if platform.system() == 'Windows':
            _, makesosh = fsutils.findTreeFiles(
                srcPath, ['.*' + os.path.sep + '(makeso.cmd)$'], ['.*\\.svn.*'])
        else:
            _, makesosh = fsutils.findTreeFiles(
                srcPath, ['.*' + os.path.sep + '(makeso.sh)$'], ['.*\\.svn.*'])
        for msh in makesosh or []:
            found = 1
            cmd = srcPath + os.path.sep + msh
            os.chmod(cmd, stat.S_IRWXU | stat.S_IRWXG)
            actlog.log('run C/C++ compiler :', msh)
            status, output = commands.getstatusoutput(cmd)
            if status != 0:
                actlog.log('C/C++ compiler ERROR !!', cmd)
                actlog.log(output)
                return 0
    if not found:
        actlog.log('run C/C++ compiler : not so found !!')
    return 1
def __load_project_file(options):
    """Load poker/project.json and discover each project's game package.

    Returns (projectlist, game_packages, gameids) on success, or the
    falsy result of __load_json_file / actlog.error on failure.
    """
    jname = 'poker/project.json'
    datadict = __load_json_file(options, jname, list)
    if not datadict:
        return datadict
    game_packages = []
    gameids = []
    for proj in datadict:
        ppath = proj.get('path', None)
        if not isinstance(ppath, str):
            return actlog.error('the project path is not defined !', proj)
        if not fsutils.dirExists(ppath):
            return actlog.error('the project path not found !', ppath)
        srcpath = fsutils.appendPath(ppath, 'src')
        if not fsutils.dirExists(srcpath):
            continue
        for subdir in os.listdir(srcpath):
            # a src subdir containing game.py is a game package
            gamepy = fsutils.appendPath(srcpath, subdir, 'game.py')
            if not fsutils.fileExists(gamepy):
                continue
            if subdir in game_packages:
                return actlog.error(
                    'the project package is double defined !',
                    gamepy, 'proj=', proj)
            game_packages.append(subdir)
            proj['package'] = subdir
            gameid = proj.get('gameId', 0)
            if gameid > 0:
                gameids.append(int(gameid))
            actlog.log('find gam project-> GAMEID=', gameid, 'PACKAGE=', subdir)
    return datadict, game_packages, gameids
def _load_old_datas(redisaddr):
    """Snapshot the existing config keys and their values from redis.

    Returns (oldkeys, olddatas) where olddatas maps key -> raw value.
    """
    actlog.log('Load Redis Old Datas :', redisaddr)
    rconn = tydb.get_redis_conn(redisaddr)
    oldkeys = []
    olddatas = {}

    def get_old_datas(ckeys_):
        # fetch all values in one MGET round trip and remember them
        oldkeys.extend(ckeys_)
        for key, value in zip(ckeys_, rconn.mget(ckeys_)):
            olddatas[key] = value

    get_old_datas(["freetime:server", "freetime:db", "freetime:global"])
    get_old_datas(["poker:cmd", "poker:oldcmd", "poker:machine", "poker:project", "poker:global"])
    get_old_datas(["poker:map.productid"])
    get_old_datas(["poker:map.bieventid"])
    get_old_datas(["poker:map.clientid"])
    get_old_datas(["poker:map.giftid"])
    get_old_datas(["poker:map.activityid"])
    # walk every game:* key with SCAN until the cursor wraps to 0
    cur = 0
    while 1:
        cur, ckeys = rconn.scan(cur, 'game:*', 999)
        if ckeys:
            get_old_datas(ckeys)
        if cur <= 0:
            break
    return oldkeys, olddatas
def load_project_datas_all(gamedir, options):
    """Load every non-room json config under gamedir/<gameId>/<module>/.

    Returns (datas, gameIds): datas maps 'game:<id>:<mod>:<name>' to the
    parsed json; gameIds lists every numeric project dir found.
    """
    datas = {}
    gameIds = []
    for gds in os.listdir(gamedir):
        if gds[0] == '.':
            continue
        try:
            game_id = str(int(gds))  # only numeric dir names are game projects
        except ValueError:
            continue
        gameIds.append(int(game_id))
        projectdir = gamedir + '/' + gds
        actlog.log('projectdir->', projectdir)
        for game_mod in os.listdir(projectdir):
            # BUGFIX: was 'and', which can never be true ('room'[0] != '.'),
            # so neither hidden dirs nor the room module were skipped. The
            # room module is loaded separately by load_project_datas_room.
            if game_mod == 'room' or game_mod[0] == '.':
                continue
            moddir = projectdir + '/' + game_mod
            actlog.log('moduledir->', moddir)
            for f in os.listdir(moddir):
                if f[0] == '.' or not f.endswith('.json'):
                    continue
                jf = moddir + '/' + f
                js = readJsonData(jf, options)
                fn = os.path.basename(f).split('.')[0]
                datas['game:' + game_id + ':' + game_mod + ':' + fn] = js
    return datas, gameIds
def getMachinePids(options, machine, pfilter=None):
    """Return the process ids configured to run on the given machine.

    A server matches when its ip equals the machine's intranet or internet
    address, it appears in options.processids (when that list is set), and
    its type passes pfilter (when a filter is given). Returns 0 when no
    server matches.
    """
    processids = options.processids
    procids = []
    for srv in options.serverlist:
        if srv['ip'] != machine['intranet'] and srv['ip'] != machine['internet']:
            continue
        pid = str(srv['type']) + str(srv['id'])
        if processids and pid not in processids:
            continue
        if pfilter and str(srv['type']) not in pfilter:
            continue
        procids.append(pid)
    if len(procids) <= 0:
        actlog.log('this machine has no server defines !' + str(machine['intranet']))
        return 0
    actlog.log('machine', machine['internet'], 'sids=', procids)
    return procids
def getMachinePids(options, machine, pfilter=None):
    """Return the process ids configured to run on the given machine.

    A server matches when its ip equals the machine's intranet or internet
    address, it appears in options.processids (when that list is set), and
    its type passes pfilter (when a filter is given). Returns 0 when no
    server matches.
    """
    processids = options.processids
    procids = []
    for srv in options.serverlist:
        if srv['ip'] != machine['intranet'] and srv['ip'] != machine['internet']:
            continue
        pid = str(srv['type']) + str(srv['id'])
        if processids and pid not in processids:
            continue
        if pfilter and str(srv['type']) not in pfilter:
            continue
        procids.append(pid)
    if len(procids) <= 0:
        actlog.log('this machine has no server defines !' + str(machine['intranet']))
        return 0
    actlog.log('machine', machine['internet'], 'sids=', procids)
    return procids
def load_project_datas_all(gamedir, options):
    """Load every non-room json config under gamedir/<gameId>/<module>/.

    Returns (datas, gameIds): datas maps 'game:<id>:<mod>:<name>' to the
    parsed json; gameIds lists every numeric project dir found.
    """
    datas = {}
    gameIds = []
    for gds in os.listdir(gamedir):
        if gds[0] == '.':
            continue
        try:
            game_id = str(int(gds))  # only numeric dir names are game projects
        except ValueError:
            continue
        gameIds.append(int(game_id))
        projectdir = gamedir + '/' + gds
        actlog.log('projectdir->', projectdir)
        for game_mod in os.listdir(projectdir):
            # BUGFIX: was 'and', which can never be true ('room'[0] != '.'),
            # so neither hidden dirs nor the room module were skipped. The
            # room module is loaded separately by load_project_datas_room.
            if game_mod == 'room' or game_mod[0] == '.':
                continue
            moddir = projectdir + '/' + game_mod
            actlog.log('moduledir->', moddir)
            for f in os.listdir(moddir):
                if f[0] == '.' or not f.endswith('.json'):
                    continue
                jf = moddir + '/' + f
                js = readJsonData(jf, options)
                fn = os.path.basename(f).split('.')[0]
                datas['game:' + game_id + ':' + game_mod + ':' + fn] = js
    return datas, gameIds
def _thread_action_push_ssh(controls):
    # Worker thread body: push the tar package to one machine over ssh and
    # untar it remotely via remote.py. controls['percent'] is a 4-char
    # progress string polled by the monitor loop. Returns (result, message)
    # with result 1 = ok, 2 = error.
    controls['percent'] = '---%'
    params = controls['params']
    machine = controls['machine']
    tarfile = params['tarfile']
    taroutpath = params['taroutpath']
    tarsubpath = params['tarsubpath']
    rmLeft = params['rmLeft']
    options = params['options']
    tarpath = fsutils.getParentDir(tarfile)
    host = machine['host']
    tyssh.connect(host, machine['user'], machine['pwd'], machine['ssh'])
    controls['percent'] = '--+%'
    # ensure all runtime directories exist on the remote side
    tyssh.mkdirs(host, options.env['output_path'])
    tyssh.mkdirs(host, options.env['log_path'])
    tyssh.mkdirs(host, options.env['webroot_path'])
    tyssh.mkdirs(host, options.env['bin_path'])
    tyssh.mkdirs(host, options.env['backup_path'])
    tyssh.mkdirs(host, tarpath)
    controls['percent'] = '000%'
    localfilesize = os.path.getsize(tarfile)

    def update_send_size(sendsize_, allsize_):
        # transfer progress callback: publish ' NN%' for the monitor loop
        if sendsize_ == allsize_ :
            p = 100
        else:
            p = int((float(sendsize_) / float(allsize_)) * 100)
        controls['percent'] = '% 3d' % (p) + '%'

    # upload the package to the same path on the remote host
    putsize = tyssh.put_file(host, tarfile, tarfile, update_send_size)
    # a size mismatch means the upload was truncated
    if int(putsize) != localfilesize :
        return 2, 'SSH Push ERROR ' + tarfile
    controls['percent'] = '110%'
    bin_path = options.env['bin_path']
    remotepy = fsutils.appendPath(bin_path, 'remote.py')
    remotetarpy = fsutils.appendPath(tarpath, 'remote.py')
    putsize = tyssh.put_file(host, remotepy, remotetarpy, None)
    localfilesize = os.path.getsize(remotepy)
    if int(putsize) != localfilesize :
        return 2, 'SSH Push ERROR ' + remotetarpy
    controls['percent'] = '120%'
    # run remote.py on the target to untar the package in place
    cmdline = 'pypy %s %s %s %s %s %s' % (remotetarpy, 'xvf', tarfile, taroutpath, tarsubpath, rmLeft)
    outputs = tyssh.executecmd(host, cmdline)
    status = tyssh.parse_remote_datas_int(outputs)
    if status != 0 :
        for l in outputs.split('\n'):
            actlog.log('REMOTE ERROR', l)
        return 2, 'SSH Push remote tar xvf ERROR'
    controls['percent'] = '++++'
    return 1, ''
def make_process_list(options, machinedict, gameids):
    """Read poker/process.json and return (processlist, machinedict).

    The first element is None (via actlog.error) when the json root is
    not a dict.
    """
    jname = 'poker/process.json'
    jsonfile = fsutils.appendPath(options.poker_path, jname)
    actlog.log('load %-15s :' % (jname), jsonfile)
    processlist = fsutils.readJsonFile(jsonfile, True)
    if isinstance(processlist, dict):
        return processlist, machinedict
    return actlog.error(jname + ' : format error, root object must be dict'), machinedict
def __load_json_file(options, jname, jtype):
    """Load a json file below options.poker_path with env substitution.

    Returns the parsed data when its root is an instance of jtype,
    otherwise the result of actlog.error.
    """
    jsonfile = fsutils.appendPath(options.poker_path, jname)
    actlog.log('load %-15s :' % (jname), jsonfile)
    datadict = fsutils.readJsoFileParseEnv(options.env, jsonfile, True)
    if isinstance(datadict, jtype):
        return datadict
    return actlog.error(jname + ' : format error, root object must be ' + str(jtype))
def make_process_list(options, machinedict, gameids):
    """Read poker/process.json and return (processlist, machinedict).

    The first element is None (via actlog.error) when the json root is
    not a dict.
    """
    jname = 'poker/process.json'
    jsonfile = fsutils.appendPath(options.poker_path, jname)
    actlog.log('load %-15s :' % (jname), jsonfile)
    processlist = fsutils.readJsonFile(jsonfile, True)
    if isinstance(processlist, dict):
        return processlist, machinedict
    return actlog.error(jname + ' : format error, root object must be dict'), machinedict
def action(options, params={}):
    """Run 'svn up' over every project under projects_path.

    params may carry svnuser/svnpwd for non-interactive authentication.
    The mutable default params={} is kept for interface compatibility;
    it is only read, never mutated. Always returns 1.
    """
    svnuser = params.get('svnuser', None)
    svnpwd = params.get('svnpwd', None)
    svnau = ''
    if svnuser and svnpwd:
        svnau = (' --username ' + str(svnuser) + ' --password ' + str(svnpwd)
                 + ' --no-auth-cache ')
    projects_path = options.pokerdict['projects_path']
    cmd = ';'.join([
        'cd ' + projects_path,
        'export LANG=en_US.UTF-8',
        'echo "================================"',
        'svn up --non-interactive ' + svnau + ' ./*',
        'echo "================================"',
    ])
    # legacy hook: run the room refresh script first when present (best effort)
    shname = '/home/tyhall/hall37/source/config_online/104/roomall.sh'
    if fsutils.fileExists(shname):
        commands.getstatusoutput(shname)
    status, output = commands.getstatusoutput(cmd)
    actlog.log('cmd return->', status)
    actlog.log('cmd output->')
    for line in output.split('\n'):
        actlog.log(line)
    actlog.log('done')
    return 1
def action(options, params={}):
    """Run 'svn up' over every project under projects_path.

    params may carry svnuser/svnpwd for non-interactive authentication.
    The mutable default params={} is kept for interface compatibility;
    it is only read, never mutated. Always returns 1.
    """
    svnuser = params.get('svnuser', None)
    svnpwd = params.get('svnpwd', None)
    svnau = ''
    if svnuser and svnpwd:
        svnau = (' --username ' + str(svnuser) + ' --password ' + str(svnpwd)
                 + ' --no-auth-cache ')
    projects_path = options.pokerdict['projects_path']
    cmd = ';'.join([
        'cd ' + projects_path,
        'export LANG=en_US.UTF-8',
        'echo "================================"',
        'svn up --non-interactive ' + svnau + ' ./*',
        'echo "================================"',
    ])
    # legacy hook: run the room refresh script first when present (best effort)
    shname = '/home/tyhall/hall37/source/config_online/104/roomall.sh'
    if fsutils.fileExists(shname):
        commands.getstatusoutput(shname)
    status, output = commands.getstatusoutput(cmd)
    actlog.log('cmd return->', status)
    actlog.log('cmd output->')
    for line in output.split('\n'):
        actlog.log(line)
    actlog.log('done')
    return 1
def make_process_list(options, machinedict, gameids):
    """Build a minimal single-machine process layout ('tiny' mode).

    Delegates to mode1 first (to learn whether PL is configured), keeps
    only the first machine, defines one process of each core type on it,
    and creates one room group per game spanning ids gameId*1000+1..999.
    Returns (processlist, machinedict-with-one-machine).
    """
    processlist, machinedict = mode1.make_process_list(options, machinedict, gameids)
    haspl = 1 if 'PL' in processlist else 0
    actlog.log('Use tiny process config !')
    gs = {}
    for gameId in gameids:
        gs[str(gameId) + '-001-998-1'] = [gameId * 1000 + 1, gameId * 1000 + 999]
    mn = machinedict.keys()[0]  # python2: keys() returns a list
    macs = {mn: machinedict[mn]}
    machinedict = macs
    processlist = {"GROUPS": gs}
    for ptype in ("CO", "HT", "RB", "CT", "UT", "GR", "GT"):
        processlist[ptype] = {"count": 1, "machines": [mn]}
    if haspl:
        processlist['PL'] = {"count": 1, "machines": [mn]}
    return processlist, macs
def load_project_datas_room(projectdir, options):
    """Load the room/*.json configs of one game project.

    Returns a dict mapping 'game:<id>:room:<name>' to the parsed json;
    empty when the project has no room directory.
    """
    datas = {}
    game_id = os.path.basename(projectdir)
    game_mod = 'room'
    moddir = projectdir + '/' + game_mod
    actlog.log('roomdir->', moddir)
    if not os.path.isdir(moddir):
        return datas
    for f in os.listdir(moddir):
        if f[0] == '.' or not f.endswith('.json'):
            continue
        jf = moddir + '/' + f
        js = readJsonData(jf, options)
        fn = os.path.basename(f).split('.')[0]
        datas['game:' + game_id + ':' + game_mod + ':' + fn] = js
    return datas
def thread_run_action(options, action):
    # Execute one deploy action on this worker thread: open its action log,
    # persist start/end timestamps to the history store, and record the
    # outcome ('Ok' / 'Exception') on the action dict.
    try:
        action['stime'] = datetime.now().strftime('%Y%m%d_%H%M%S')
        actlog.open_act_log(options, action)
        acthistory.save_action_history(options, action)
        actlog.log('START ACTION', action)
        thread_do_action(options, action)
        action['result'] = 'Ok'
    except:
        # bare except by design: any failure is logged and recorded,
        # never propagated out of the thread
        actlog.error()
        action['result'] = 'Exception'
    finally:
        actlog.close_act_log()
        action['etime'] = datetime.now().strftime('%Y%m%d_%H%M%S')
        acthistory.save_action_history(options, action)
def thread_run_action(options, action):
    # Execute one deploy action on this worker thread: open its action log,
    # persist start/end timestamps to the history store, and record the
    # outcome ('Ok' / 'Exception') on the action dict.
    try:
        action['stime'] = datetime.now().strftime('%Y%m%d_%H%M%S')
        actlog.open_act_log(options, action)
        acthistory.save_action_history(options, action)
        actlog.log('START ACTION', action)
        thread_do_action(options, action)
        action['result'] = 'Ok'
    except:
        # bare except by design: any failure is logged and recorded,
        # never propagated out of the thread
        actlog.error()
        action['result'] = 'Exception'
    finally:
        actlog.close_act_log()
        action['etime'] = datetime.now().strftime('%Y%m%d_%H%M%S')
        acthistory.save_action_history(options, action)
def load_project_datas_room(projectdir, options):
    """Load the room/*.json configs of one game project.

    Returns a dict mapping 'game:<id>:room:<name>' to the parsed json;
    empty when the project has no room directory.
    """
    datas = {}
    game_id = os.path.basename(projectdir)
    game_mod = 'room'
    moddir = projectdir + '/' + game_mod
    actlog.log('roomdir->', moddir)
    if not os.path.isdir(moddir):
        return datas
    for f in os.listdir(moddir):
        if f[0] == '.' or not f.endswith('.json'):
            continue
        jf = moddir + '/' + f
        js = readJsonData(jf, options)
        fn = os.path.basename(f).split('.')[0]
        datas['game:' + game_id + ':' + game_mod + ':' + fn] = js
    return datas
def action(options):
    '''
    Generate the process bootstrap scripts (remote.py and while1.sh)
    into bin_path, substituting env values into their templates.
    '''
    bin_path = options.env['bin_path']
    config_redis = options.pokerdict.get('local_config_redis', options.pokerdict['config_redis'])
    # render remote.py from its template
    remotepy = loadResource('remote.py')
    remotepy = remotepy.replace('${TIME}', str(datetime.now()))
    remotepy = remotepy.replace('${BIN_PATH}', bin_path)
    remotepy = remotepy.replace('${LOG_PATH}', options.env['log_path'])
    remotepy = remotepy.replace('${REDIS_CONFIG}', config_redis)
    cfilepath = fsutils.appendPath(bin_path, 'remote.py')
    fsutils.writeFile('', cfilepath, remotepy)
    # modes <= 2 enable alert mail in the watchdog script
    if options.pokerdict['mode'] <= 2:
        sendmail = '1'
    else:
        sendmail = '0'
    while1sh = loadResource('while1.sh')
    while1sh = while1sh.replace('${SENDMAIL}', sendmail)
    while1sh = while1sh.replace('${BIN_PATH}', bin_path)
    while1sh = while1sh.replace('${LOG_PATH}', options.env['log_path'])
    if os.environ.get('RUN_IN_DOCKER', 0):
        # dev docker mode: build the PYTHONPATH from the project sources
        paths = []
        for proj in options.projectlist:
            src = fsutils.appendPath(proj['path'], 'src')
            if fsutils.dirExists(src):
                paths.append(src)
            else:
                # freetime-style project: the package lives at the project root
                src = fsutils.appendPath(proj['path'], 'freetime')
                if fsutils.dirExists(src):
                    paths.append(proj['path'])
        paths.reverse()
        paths = ':'.join(paths)
        while1sh = while1sh.replace('${DOCKER_PROJECT_PATH}', paths)
        actlog.log('makeControlInDocker PYTHONPATH=', paths)
    shfile = fsutils.appendPath(bin_path, 'while1.sh')
    fsutils.writeFile('', shfile, while1sh)
    os.chmod(shfile, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
    return 1
def action(options):
    '''
    Generate the process bootstrap scripts (remote.py and while1.sh)
    into bin_path, substituting env values into their templates.
    '''
    bin_path = options.env['bin_path']
    config_redis = options.pokerdict.get('local_config_redis', options.pokerdict['config_redis'])
    # render remote.py from its template
    remotepy = loadResource('remote.py')
    remotepy = remotepy.replace('${TIME}', str(datetime.now()))
    remotepy = remotepy.replace('${BIN_PATH}', bin_path)
    remotepy = remotepy.replace('${LOG_PATH}', options.env['log_path'])
    remotepy = remotepy.replace('${REDIS_CONFIG}', config_redis)
    cfilepath = fsutils.appendPath(bin_path, 'remote.py')
    fsutils.writeFile('', cfilepath, remotepy)
    # modes <= 2 enable alert mail in the watchdog script
    if options.pokerdict['mode'] <= 2 :
        sendmail = '1'
    else:
        sendmail = '0'
    while1sh = loadResource('while1.sh')
    while1sh = while1sh.replace('${SENDMAIL}', sendmail)
    while1sh = while1sh.replace('${BIN_PATH}', bin_path)
    while1sh = while1sh.replace('${LOG_PATH}', options.env['log_path'])
    if os.environ.get('RUN_IN_DOCKER', 0):
        # dev docker mode: build the PYTHONPATH from the project sources
        paths = []
        for proj in options.projectlist:
            src = fsutils.appendPath(proj['path'], 'src')
            if fsutils.dirExists(src):
                paths.append(src)
            else:
                # freetime-style project: the package lives at the project root
                src = fsutils.appendPath(proj['path'], 'freetime')
                if fsutils.dirExists(src):
                    paths.append(proj['path'])
        paths.reverse()
        paths = ':'.join(paths)
        while1sh = while1sh.replace('${DOCKER_PROJECT_PATH}', paths)
        actlog.log('makeControlInDocker PYTHONPATH=', paths)
    shfile = fsutils.appendPath(bin_path, 'while1.sh')
    fsutils.writeFile('', shfile, while1sh)
    os.chmod(shfile, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
    return 1
def __load_server_file(options, machinedict, gameids):
    # Build the full server process list: delegate to the mode-specific
    # process-list factory, merge every game's room config, auto-group the
    # processes onto machines, and verify each generated server ip has a
    # machine definition. Returns the server list, or a falsy value on error.
    mode = options.env['mode']
    processlist, machinedict = modefactory[mode].make_process_list(
        options, machinedict, gameids)
    if not processlist:
        return processlist
    allrooms = {}
    # load each game's room config to learn room process ids and counts
    for gameId in gameids:
        jname = 'game/' + str(gameId) + '/room/0.json'
        jsonfile = fsutils.appendPath(options.poker_path, jname)
        if fsutils.fileExists(jsonfile):
            actlog.log('load %-15s :' % (jname), jsonfile)
            rooms = fsutils.readJsonFile(jsonfile, True)
            if not isinstance(rooms, dict):
                return actlog.error(
                    jname + ' : format error, root object must be dict')
            for rid in rooms:
                # a roomId must be unique across all games
                if rid in allrooms:
                    return actlog.error(jname + ' : the roomId already defined !! ' + str(rid))
                allrooms[rid] = rooms[rid]
    serverlist = auto_process.auto_group_process(machinedict, processlist, allrooms, mode)
    serverlist = strutil.replace_objevn_value(serverlist, options.env)
    # cross-check: every server ip must belong to a known machine; remove
    # matched entries from the clone and report whatever is left over
    checks = strutil.cloneData(serverlist)
    for _, m in options.machinedict.items():
        internet = m['internet']
        intranet = m['intranet']
        for x in xrange(len(checks) - 1, -1, -1):
            p = checks[x]
            if p['ip'] == internet or p['ip'] == intranet:
                del checks[x]
    if len(checks) > 0:
        for p in checks:
            actlog.error('can not find machine define of server ip :', p['ip'])
        return 0
    return serverlist
def action(options): """ 拷贝源代码工程的etc、src、src-robot、webroot到编译输出目录,按照配置文件的工程列表进行顺序覆盖拷贝 """ # 创建所有的路径 allpaths = [ options.env['log_path'], options.env['webroot_path'], options.env['backup_path'], ] for mp in allpaths: fsutils.makePath(mp, False) allpaths = [ options.env['bin_path'] ] for mp in allpaths: fsutils.makePath(mp, True) if os.environ.get('RUN_IN_DOCKER', 0): # 在开发docker模式下,webroot为link模式,无需拷贝 copySourceInDocker(options) actlog.log('docker mode use original project src path !') return 1 paths = [] for proj in options.projectlist: src = fsutils.appendPath(proj['path'], 'src') if fsutils.dirExists(src): paths.append({'path': src, 'include': [], 'exclude': [".*\\.svn\\.*", ".*pyc"]}) else: # freetime project paths.append({'path': proj['path'], "include": ["^/freetime/.*"], "exclude": [".*\\.svn\\.*", ".*pyc", ".*\\logserver\\.*", ".*\\cold-data-server\\.*"]}) if ide_debug(): _, copy_files = fsutils.linkTree(paths, options.env['bin_path'], logfun=actlog.log) else: _, copy_files = fsutils.copyTree(paths, options.env['bin_path'], logfun=actlog.log) setattr(options, '_pyfiles', copy_files) return 1
def action(options):
    """Tar the webroot dir and push it to every machine.

    Skipped in docker mode (webroot is linked) and in test modes 3/4.
    """
    actlog.log('push webroot file to all machines !')
    if os.environ.get('RUN_IN_DOCKER', 0):
        actlog.log('docker mode skip this step !')
        return 1
    if options.pokerdict['mode'] in (3, 4):
        actlog.log('Testing Mode, skip this step.')
        return 1
    cpath = 'web_' + datetime.now().strftime('%Y%m%d_%H%M%S')
    outpath = fsutils.appendPath(options.env['backup_path'], cpath)
    webpath = options.env['webroot_path']
    tarfile = tytar.tar_cvfz(outpath, webpath)
    actlog.log('webroot file =', tarfile)
    ret = typush.push_tar_to_all_server(options, tarfile,
                                        fsutils.getParentDir(webpath),
                                        fsutils.getLastPathName(webpath), 0)
    actlog.log('push webroot file to all machines ! done !!')
    return ret
def action(options):
    '''
    Copy each project's webroot into the output webroot, overlaying in the
    configured project order; in modes 3/4 the tygame-webroot dir is
    symlinked instead of copied.
    '''
    actlog.log('copy webroot to bin/webroot')
    if os.environ.get('RUN_IN_DOCKER', 0):
        # dev docker mode: webroot is linked in place, nothing to copy
        actlog.log('docker mode skip this step !')
        return 1
    allpaths = [
        options.env['webroot_path'],
        options.env['backup_path'],
    ]
    for mp in allpaths:
        fsutils.makePath(mp, False)
    paths = []
    for proj in options.projectlist:
        src = fsutils.appendPath(proj['path'], 'webroot')
        if fsutils.dirExists(src):
            paths.append({
                'path': src,
                'include': [],
                'exclude': [".*\\.svn\\.*", ".*pyc"]
            })
    if options.pokerdict['mode'] in (1, 2):
        fsutils.copyTree(paths, options.env['webroot_path'], logfun=actlog.log)
        return 1
    # MODE 3, 4: link only, and only tygame-webroot/webroot
    dstpath = options.env['webroot_path']
    for pathconf in paths:
        if 'tygame-webroot' in pathconf['path']:
            srcpath = pathconf['path']
            if os.path.islink(dstpath):
                if os.readlink(dstpath) == srcpath:
                    # already linked to the right target
                    actlog.log('already linked.', dstpath, '->', srcpath)
                    return 1
            # NOTE(review): dstpath is deleted whether it is a stale link or
            # a real dir, so the symlink below can be created — confirm this
            # matches the original (mangled) indentation.
            fsutils.deletePath(dstpath)
            os.symlink(srcpath, dstpath)
            actlog.log('symlink created.', dstpath, '->', srcpath)
            return 1
    else:
        # for-else: no tygame-webroot project found, fall back to copying
        fsutils.copyTree(paths, options.env['webroot_path'], logfun=actlog.log)
    return 1
def makeSoInDocker(options):
    """Run every makeso build script found under each project's src dir.

    Returns 1 on success (also when no script exists), 0 on the first
    script that exits non-zero.
    """
    found = 0
    for proj in options.projectlist:
        srcPath = fsutils.appendPath(proj['path'], 'src')
        actlog.log('makeSoInDocker', srcPath)
        # Windows projects ship makeso.cmd, everything else makeso.sh
        if platform.system() == 'Windows':
            _, makesosh = fsutils.findTreeFiles(
                srcPath, ['.*' + os.path.sep + '(makeso.cmd)$'], ['.*\\.svn.*'])
        else:
            _, makesosh = fsutils.findTreeFiles(
                srcPath, ['.*' + os.path.sep + '(makeso.sh)$'], ['.*\\.svn.*'])
        for msh in makesosh or []:
            found = 1
            cmd = srcPath + os.path.sep + msh
            os.chmod(cmd, stat.S_IRWXU | stat.S_IRWXG)
            actlog.log('run C/C++ compiler :', msh)
            status, output = commands.getstatusoutput(cmd)
            if status != 0:
                actlog.log('C/C++ compiler ERROR !!', cmd)
                actlog.log(output)
                return 0
    if not found:
        actlog.log('run C/C++ compiler : not so found !!')
    return 1
def action(options):
    """Tar the bin dir and push it to every machine.

    Skipped in docker mode (runs from source) and in test modes 3/4.
    """
    actlog.log('push bin file to all machines !')
    if os.environ.get('RUN_IN_DOCKER', 0):
        actlog.log('docker mode skip this step !')
        return 1
    if options.pokerdict['mode'] in (3, 4):
        actlog.log('Testing Mode, skip this step.')
        return 1
    cpath = 'bin_' + datetime.now().strftime('%Y%m%d_%H%M%S')
    outpath = fsutils.appendPath(options.env['backup_path'], cpath)
    bin_path = options.env['bin_path']
    tarfile = tytar.tar_cvfz(outpath, bin_path)
    actlog.log('bin file =', tarfile)
    ret = typush.push_tar_to_all_server(options, tarfile,
                                        fsutils.getParentDir(bin_path),
                                        fsutils.getLastPathName(bin_path), 1)
    actlog.log('push bin file to all machines ! done !!')
    return ret
def make_process_list(options, machinedict, gameids):
    """Build a minimal single-machine process layout ('tiny' mode).

    Delegates to mode1 first (to learn whether PL is configured), keeps
    only the first machine, defines one process of each core type on it,
    and creates one room group per game spanning ids gameId*1000+1..999.
    Returns (processlist, machinedict-with-one-machine).
    """
    processlist, machinedict = mode1.make_process_list(options, machinedict, gameids)
    haspl = 1 if 'PL' in processlist else 0
    actlog.log('Use tiny process config !')
    gs = {}
    for gameId in gameids:
        gs[str(gameId) + '-001-998-1'] = [gameId * 1000 + 1, gameId * 1000 + 999]
    mn = machinedict.keys()[0]  # python2: keys() returns a list
    macs = {mn: machinedict[mn]}
    machinedict = macs
    processlist = {"GROUPS": gs}
    for ptype in ("CO", "HT", "RB", "CT", "UT", "GR", "GT"):
        processlist[ptype] = {"count": 1, "machines": [mn]}
    if haspl:
        processlist['PL'] = {"count": 1, "machines": [mn]}
    return processlist, macs
def action(options):
    '''
    Copy each project's webroot into the output webroot, overlaying in the
    configured project order; in modes 3/4 the tygame-webroot dir is
    symlinked instead of copied.
    '''
    actlog.log('copy webroot to bin/webroot')
    if os.environ.get('RUN_IN_DOCKER', 0) :
        # dev docker mode: webroot is linked in place, nothing to copy
        actlog.log('docker mode skip this step !')
        return 1
    allpaths = [
        options.env['webroot_path'],
        options.env['backup_path'],
    ]
    for mp in allpaths:
        fsutils.makePath(mp, False)
    paths = []
    for proj in options.projectlist :
        src = fsutils.appendPath(proj['path'], 'webroot')
        if fsutils.dirExists(src) :
            paths.append({'path':src, 'include' : [], 'exclude' : [".*\\.svn\\.*", ".*pyc"]})
    if options.pokerdict['mode'] in (1, 2):
        fsutils.copyTree(paths, options.env['webroot_path'], logfun=actlog.log)
        return 1
    # MODE 3, 4: link only, and only tygame-webroot/webroot
    dstpath = options.env['webroot_path']
    for pathconf in paths:
        if 'tygame-webroot' in pathconf['path']:
            srcpath = pathconf['path']
            if os.path.islink(dstpath):
                if os.readlink(dstpath) == srcpath:
                    # already linked to the right target
                    actlog.log('already linked.', dstpath, '->', srcpath)
                    return 1
            # NOTE(review): dstpath is deleted whether it is a stale link or
            # a real dir, so the symlink below can be created — confirm this
            # matches the original (mangled) indentation.
            fsutils.deletePath(dstpath)
            os.symlink(srcpath, dstpath)
            actlog.log('symlink created.', dstpath, '->', srcpath)
            return 1
    else:
        # for-else: no tygame-webroot project found, fall back to copying
        fsutils.copyTree(paths, options.env['webroot_path'], logfun=actlog.log)
    return 1
def action(options):
    """Tar webroot/static_file and push it to every machine.

    Skipped in docker mode (webroot is linked in place).
    """
    actlog.log('push static config file to all machines !')
    if os.environ.get('RUN_IN_DOCKER', 0):
        actlog.log('docker mode skip this step !')
        return 1
    cpath = 'stc_' + datetime.now().strftime('%Y%m%d_%H%M%S')
    outpath = fsutils.appendPath(options.env['backup_path'], cpath)
    stpath = options.env['webroot_path'] + '/static_file'
    tarfile = tytar.tar_cvfz(outpath, stpath)
    actlog.log('static config file =', tarfile)
    ret = typush.push_tar_to_all_server(options, tarfile,
                                        fsutils.getParentDir(stpath),
                                        fsutils.getLastPathName(stpath), 1)
    actlog.log('push static config file to all machines ! done !!')
    return ret
def action(options):
    """Tar webroot/static_file and push it to every machine.

    Skipped in docker mode (webroot is linked in place).
    """
    actlog.log('push static config file to all machines !')
    if os.environ.get('RUN_IN_DOCKER', 0):
        actlog.log('docker mode skip this step !')
        return 1
    cpath = 'stc_' + datetime.now().strftime('%Y%m%d_%H%M%S')
    outpath = fsutils.appendPath(options.env['backup_path'], cpath)
    stpath = options.env['webroot_path'] + '/static_file'
    tarfile = tytar.tar_cvfz(outpath, stpath)
    actlog.log('static config file =', tarfile)
    ret = typush.push_tar_to_all_server(options, tarfile,
                                        fsutils.getParentDir(stpath),
                                        fsutils.getLastPathName(stpath), 1)
    actlog.log('push static config file to all machines ! done !!')
    return ret
def action(options):
    """
    Start all remote server processes, batch by batch, in dependency order.

    Each process-type batch is started across machines in parallel; a batch
    must come up cleanly before the next one starts. Returns 1 on success,
    0 on the first failing batch.
    """
    actlog.log('server start on all machines !')
    params = {'options': options}
    # start order: agents, then utility/robot, then game, then hall/core
    filters = [['AG'], ['UT', 'PL', 'PG', 'CT', 'RB'], ['GR', 'GT'], ['HT', 'CO']]
    for f in filters:
        params['thread_filter'] = f
        haserror = tythread.mutil_thread_machine_action(params, _thread_action_start)
        if haserror:
            actlog.log('server start error !')
            return 0
    # NOTE: the original repeated the haserror check after the loop; that
    # copy was unreachable (the loop already returns on error) and removed.
    return 1
def action(options): ''' ''' pkey = os.environ.get('MAINCLS', '') actlog.log('pkey->', pkey) if not pkey: return 0 for p in psutil.process_iter(): try: cmdline = p.cmdline() except: continue actlog.log('cmdline->', cmdline) if pkey in cmdline: try: p.kill() except Exception, e: actlog.log(e)
def action(options): ''' ''' pkey = os.environ.get('MAINCLS', '') actlog.log('pkey->', pkey) if not pkey : return 0 for p in psutil.process_iter(): try: cmdline = p.cmdline() except: continue actlog.log('cmdline->', cmdline) if pkey in cmdline : try: p.kill() except Exception, e: actlog.log(e)
def action(options):
    """
    Start all remote server processes, batch by batch, in dependency order.

    Each process-type batch is started across machines in parallel; a batch
    must come up cleanly before the next one starts. Returns 1 on success,
    0 on the first failing batch.
    """
    actlog.log('server start on all machines !')
    params = {'options': options}
    # start order: agents, then utility/robot, then game, then hall/core
    filters = [['AG'], ['UT', 'PL', 'PG', 'CT', 'RB'], ['GR', 'GT'], ['HT', 'CO']]
    for f in filters:
        params['thread_filter'] = f
        haserror = tythread.mutil_thread_machine_action(
            params, _thread_action_start)
        if haserror:
            actlog.log('server start error !')
            return 0
    # NOTE: the original repeated the haserror check after the loop; that
    # copy was unreachable (the loop already returns on error) and removed.
    return 1
def mutil_thread_machine_action(params, fun_thread_main):
    # Run fun_thread_main once per machine, each in its own thread, and poll
    # the shared controls dicts about once a second, logging one status char
    # per worker (R running / O ok / E error, optionally prefixed by a
    # percent string the worker publishes). Returns 1 when any worker
    # finished with result != 1, else 0.
    stime = time.time()
    threads = []
    machines = params['options'].machinedict.values()
    for machine in machines:
        # done   0 - thread running      1 - thread finished
        # result 0 - action in progress  1 - ok  2 - failed
        controls = {'params': params, 'machine': machine, 'done': 0, 'result': 0}
        t = cread_thread(fun_thread_main, controls)
        controls['thread'] = t
        threads.append(controls)
    # runchars = ['*', '-', '\\', '|', '/']
    isdoneall = 0
    resultall = 0
    # wcount = 0
    while 1:
        ct = time.time()
        slines = []
        isdoneall = 0
        resultall = 0
        for worker in threads:
            isdone = worker.get('done', 0)
            result = worker.get('result', 0)
            percent = worker.get('percent', None)
            isdoneall += isdone
            resultall += result
            if result == 0:
                st = 'R'  # runchars[wcount % len(runchars)]
            elif result == 1:
                st = 'O'
            else:
                st = 'E'
            # NOTE(review): the three branches below are identical — the
            # percent prefix is applied regardless of result.
            if percent is not None:
                if result == 0:
                    st = percent + st
                elif result == 1:
                    st = percent + st
                else:
                    st = percent + st
            slines.append(st + ' ')
        ptime = '%03d' % (time.time() - stime)
        lmsg = 'PROGRESS : ' + ptime + ' ' + ''.join(slines)
        actlog.log(lmsg)
        if isdoneall == len(threads):
            break
        # sleep the remainder of the 1-second polling interval
        time.sleep(max(0.1, 1 - time.time() + ct))
        # wcount += 1
    # summarize each worker's outcome
    haserror = 0
    for worker in threads:
        ip = worker['machine']['intranet']
        result = worker.get('result', 0)
        if result == 1:
            resultstr = 'OK'
        else:
            resultstr = 'ERROR'
            haserror = 1
        msg = 'REMOTE : %-16s : %s' % (ip, resultstr)
        if result == 1:
            actlog.log(msg)
        else:
            # dump the failed worker's captured output line by line
            outputs = worker.get('outputs', '')
            for l in outputs.split('\n'):
                actlog.log(msg, l)
        finalstatus = worker.get('finalstatus', None)
        if finalstatus:
            for line in finalstatus:
                actlog.log(line)
    return haserror
def action(options):
    """
    Pre-compile all py files to pyc so syntax errors surface early; also
    builds any C/C++ .so helpers via their makeso scripts first.
    """
    if os.environ.get('RUN_IN_DOCKER', 0):
        # dev docker mode: only build the .so files, skip pyc compilation
        if not makeSoInDocker(options):
            return 0
        actlog.log('docker mode skip compiler pyc !')
        return 1
    # build the SO files
    bin_path = options.env['bin_path']
    # LINUX WIN32
    if platform.system() == 'Darwin':
        # fixed script list on macOS; other systems scan the tree
        makesosh = [
            os.path.join('freetime', 'core', 'cffi', 'makeso.sh'),
            os.path.join('poker', 'util', 'cffi', 'makeso.sh'),
        ]
    elif platform.system() == 'Windows':
        _, makesosh = fsutils.findTreeFiles(bin_path, ['.*' + os.path.sep + '(makeso.cmd)$'], ['.*\\.svn.*'])
    else:
        _, makesosh = fsutils.findTreeFiles(bin_path, ['.*' + os.path.sep + '(makeso.sh)$'], ['.*\\.svn.*'])
    if len(makesosh) == 0:
        actlog.log('run C/C++ compiler : not found !!')
    for msh in makesosh:
        cmd = bin_path + os.path.sep + msh
        os.chmod(cmd, stat.S_IRWXU | stat.S_IRWXG)
        actlog.log('run C/C++ compiler :', msh)
        status, output = commands.getstatusoutput(cmd)
        if status != 0:
            actlog.log('C/C++ compiler ERROR !!', cmd)
            actlog.log(output)
            return 0
    # pre-compile only on a Linux runtime
    if platform.system() != 'Linux':
        return 1
    # generate the compiler driver file: one import per copied py module
    # (hotfix modules are excluded)
    pkgs = []
    pyfiles = options._pyfiles
    for pyf in pyfiles:
        if pyf.endswith('.py') and pyf.find('hotfix') < 0:
            pkg = ' import ' + '.'.join(pyf.split(os.path.sep)[1:])
            pkg = pkg[0:-3]
            if pkg.endswith('__init__'):
                pkg = pkg[0:-9]
            pkgs.append(pkg)
    content = '''
# -*- coding: utf-8 -*-
# author time : %s
import sys
from twisted.internet import reactor # 确保reactor第一时间初始化, 否则可能莫名其妙的crash
if sys.getdefaultencoding().lower() != 'utf-8' :
    reload(sys)
    sys.setdefaultencoding("utf-8")
try:
%s
except:
    print sys.path
    raise
''' % (str(datetime.now()), '\n'.join(pkgs))
    cfilepath = bin_path + os.path.sep + '_compiler.py'
    fsutils.writeFile('', cfilepath, content)
    actlog.log('run PYPY compiler :', cfilepath)
    pypy = strutil.getEnv('PYPY', 'pypy')
    cmd = '%s -tt %s' % (pypy, cfilepath)
    status, output = commands.getstatusoutput(cmd)
    if status != 0:
        actlog.log('ERROR !!', 'compile py files false !', status, cfilepath)
        lines = output.split('\n')
        for line in lines:
            actlog.log(line)
        return 0
    else:
        # fsutils.deleteFile(cfilepath)
        # compilation was only a syntax check: remove the generated .pyc files
        for pyf in pyfiles:
            if pyf.endswith('.py'):
                f = bin_path + os.path.sep + pyf + 'c'
                if fsutils.fileExists(f):
                    fsutils.deleteFile(f)
    return 1
def action(options, integrate=0):
    """Load and check the configuration sync status of all servers.

    Sends a status hotfix command to every server and compares each
    server's configure key index (CINDEX) against the redis change-key
    list length.  When integrate == 0 returns 1/0 (ok/failed); otherwise
    returns the tuple (ok_count, total_count) for the caller's polling loop.
    """
    serverlist = options.serverlist
    setattr(options, 'hotfixpy', HOTCODE)
    setattr(options, 'hotfixwait', 1)
    x = 0  # NOTE(review): counter is incremented but never used
    serverIds = []
    for srv in serverlist :
        x += 1
        serverIds.append(srv['type'] + srv['id'])
    serverIds = ','.join(serverIds)
    setattr(options, 'serverIds', serverIds)
    if integrate == 0 :
        actlog.log('get configure status->', serverIds)
    # Query every server through the hotfix channel.
    ret = hotfix.action(options, 0)
    config_redis = options.pokerdict['config_redis']
    rconn = tydb.get_redis_conn(config_redis)
    # Current length of the change-key list == the newest configure index.
    clen = rconn.llen(_CHANGE_KEYS_NAME)
    try:
        datas = strutil.loads(ret)
    except:
        actlog.log('ERROR !!', ret)
        if integrate == 0 :
            return 0
        return 0, 0
    confOks = []   # servers already at the newest configure index
    confNgs = []   # servers still lagging behind
    errors = []
    errorids = []
    for sid in datas :
        if isinstance(datas[sid], dict):
            cidx = datas[sid].get('CINDEX', None)
            cerrs = datas[sid].get('ERRORS', [])
        else:
            cidx = ''
            cerrs = []
            actlog.log('ERROR !!!! ', sid, datas[sid])
        if cerrs :
            errorids.append(sid)
            for cerr in cerrs :
                # NOTE(review): 'tarceback' looks like a typo of 'traceback',
                # but it must match the key the remote side produces - verify
                # against the producer before renaming.
                erritem = [cerr.get('exception', None), cerr.get('tarceback', None)]
                if not erritem in errors :
                    errors.append(erritem)
        if isinstance(cidx, int) :
            if cidx >= clen :
                confOks.append(sid)
            else:
                confNgs.append(sid)
        else:
            actlog.error('ERROR !!', sid, 'GET STATUS ERROR !!', datas[sid])
    if errors :
        actlog.log('ERROR IDS =', ','.join(errorids))
        actlog.log('========== ERROR !!!! BEGINE ========')
        for x in errors :
            actlog.log('========== Exception ========')
            for l in x[0] :
                for m in l.split('\n') :
                    if m :
                        actlog.log(m)
            actlog.log('========== Traceback ========')
            for l in x[1] :
                for m in l.split('\n') :
                    if m :
                        actlog.log(m)
        actlog.log('========== ERROR !!!! END ========')
        raise Exception('Remote Exception')
        # NOTE(review): the returns below are unreachable after the raise.
        if integrate == 0 :
            return 0
        else:
            return 0, 0
    if integrate == 0 :
        actlog.log('THE CONFIGURE KEY INDEX =', clen)
        actlog.log('TOTAL_COUNT =', len(serverlist) , 'OK_COUNT =', len(confOks), 'DELAY_COUNT =', len(confNgs))
        # NOTE(review): raises ZeroDivisionError if datas is empty - confirm
        # the remote reply always contains at least one server entry.
        actlog.log('CONFIGURE_STATUS = %0.2d' % (int(float(len(confOks)) / len(datas) * 100)) + '%')
    if integrate == 0 :
        return 1
    else:
        return len(confOks), len(serverlist)
def auto_group_process(mcs, procs, rooms, mode):
    """Distribute all server processes over the machine list.

    mcs   : dict of machine-name -> machine config (mutated in place:
            'weight', per-type id lists, 'AG', 'agidx' are filled in)
    procs : per-server-type process settings
    rooms : room configuration used to derive GR/GT instance ids
    mode  : deployment mode (1 online / 2 simulation / others test)
    Returns the flat list of server descriptor dicts.
    """
    # Validate and normalize the machine weights (1..9).
    for mc in mcs.values():
        weight = int(mc.get('weight', 1))
        if weight < 1 or weight > 9:
            raise Exception('then machine weight must >= 1 and <= 9 machine=' + json.dumps(mc))
        mc['weight'] = weight
    rids, tids = _auto_room_ins_ids(rooms, procs, mode)
    # Assign the plain server types, then the room-bound GR/GT types.
    stypes = ['PL', 'HT', 'CO', 'RB', 'CT', 'UT']
    for stype in stypes:
        _auto_group_process_normal(stype, mcs, procs)
    _auto_group_process_rooms('GR', mcs, procs, rids)
    _auto_group_process_rooms('GT', mcs, procs, tids)
    # Derive the AG (agent) count per machine.
    stypes = ['PL', 'HT', 'RB', 'CT', 'UT', 'GR', 'GT']
    agid = 0
    agmax = modefactory[mode].get_max_ag_count(mcs)
    for mn, mc in mcs.items():
        ids = []
        if agmax > 0:
            ag = agmax
        else:
            co = len(mc.get('CO', []))
            ot = 0
            for st in stypes:
                ot += len(mc.get(st, []))
            # CO processes map 1:1 to AG, everything else 10:1.
            ag = co + int(math.ceil(ot / 10.0))
            ag = max(min(ag, 20), co)
        if ag > 999:
            raise Exception('the auto ag count must be 0~999' + json.dumps(mc))
        for _ in xrange(ag):
            agid += 1
            ids.append('%03d' % (agid))
        mc['AG'] = ids
    # Log per-machine and global process counts.
    totals = {}
    ccc = 0
    stypes = ['CO', 'PL', 'HT', 'RB', 'CT', 'UT', 'GR', 'GT', 'AG']
    for mn, mc in mcs.items():
        ptotals = {}
        cc = 0
        for st in stypes:
            c = len(mc.get(st, []))
            if c > 0:
                ptotals[st] = c
                if st in totals:
                    totals[st] += c
                else:
                    totals[st] = c
                cc += c
                ccc += c
        actlog.log(
            'COUNT %-3s %-4s %-10s' % (str(cc), mn, mc.get('intranet', mc.get('internet'))),
            json.dumps(ptotals))
    actlog.log('TOTAL %-4s' % (str(ccc)), json.dumps(totals))
    # In docker mode CO ports come from a fixed host port range.
    coPortMin = 0
    coPortMax = 0
    if os.environ.get('RUN_IN_DOCKER', 0):
        coPortMin = int(os.environ['PORT_TCP_CONN_MIN'])
        coPortMax = int(os.environ['PORT_TCP_CONN_MAX'])
    # Build one server descriptor per assigned process id.
    srvs = []
    for stype in ['PL', 'HT', 'CO', 'AG', 'RB', 'CT', 'UT', 'GR', 'GT']:
        mns = list(mcs.keys())
        mns.sort()
        for mn in mns:
            mc = mcs[mn]
            ids = mc.get(stype, [])
            if not 'agidx' in mc:
                mc['agidx'] = 0
            if ids:
                for x in ids:
                    srv = {}
                    srv['mn'] = mn
                    srv['type'] = stype
                    srv['id'] = x
                    srv['ip'] = mc.get('intranet', mc.get('internet'))
                    tc = procs.get(stype, {}).get("task-concurrent", 0)
                    if tc > 0:
                        srv["task-concurrent"] = tc
                    if stype in ('PL', ):
                        srv["protocols"] = {
                            "server": {
                                "ht-http": "${{port_http++}}"
                            }
                        }
                        srv["mysql"] = "${{db.mysql.all}}"
                        srv["redis"] = "${{db.redis.all}}"
                    elif stype in ('HT', ):
                        srv["protocols"] = {
                            "server": {
                                "ht-http": "${{port_http++}}"
                            }
                        }
                        srv["redis"] = "${{db.redis.all}}"
                    elif stype in ('CO', ):
                        srv["protocols"] = {
                            "server": {
                                "co-tcp": "${{port_http++}}"
                            }
                        }
                        srv["mysql"] = "${{db.mysql.all}}"
                        srv["redis"] = "${{db.redis.all}}"
                    elif stype in ('AG', ):
                        srv["protocols"] = {
                            "server": {
                                "a2a": "${{port_http++}}",
                                "a2s": "${{port_http++}}"
                            }
                        }
                    elif stype in ('RB', 'CT', 'GR', 'GT'):
                        srv["redis"] = "${{db.redis.all}}"
                    # BUGFIX: was `stype in ('UT')` - a substring test on the
                    # string 'UT', not a tuple membership test. Behavior is the
                    # same for the stypes iterated here, but the tuple form is
                    # what was intended and is safe against new stypes.
                    elif stype in ('UT', ):
                        srv["redis"] = "${{db.redis.all}}"
                        # Only the low-numbered UT instances get mysql access;
                        # the cutoff depends on the deployment mode.
                        if mode == 1 and int(x) <= 80:
                            srv["mysql"] = "${{db.mysql.all}}"
                        elif mode == 2 and int(x) <= 2:
                            srv["mysql"] = "${{db.mysql.all}}"
                        elif int(x) <= 1:
                            srv["mysql"] = "${{db.mysql.all}}"
                    if os.environ.get('RUN_IN_DOCKER', 0):
                        if stype in ('CO', ):
                            if coPortMin > coPortMax:
                                raise Exception(
                                    'the PORT_TCP_CONN_MIN PORT_TCP_CONN_MAX not enought range ! coPortMax='
                                    + str(coPortMax))
                            srv["protocols"] = {
                                "server": {
                                    "co-tcp": coPortMin
                                }
                            }
                            coPortMin += 1
                    if stype != 'AG':
                        # Round-robin this machine's agents over its processes.
                        aids = mc.get('AG')
                        srv['agent'] = "AG" + aids[mc['agidx'] % len(aids)]
                        mc['agidx'] += 1
                    srvs.append(srv)
    return srvs
def _thread_action_start(controls):
    """Worker-thread entry: start one machine's processes and poll until done.

    ``controls`` is a shared dict the caller monitors; this routine fills in
    'done' (1 when finished), 'result' (1 ok / 2 failed), 'percent' and
    'outputs'.
    """
    params = controls['params']
    machine = controls['machine']
    options = params['options']
    pfilter = params['thread_filter']
    procids = getMachinePids(options, machine, pfilter)
    if not procids:
        controls['done'] = 1
        controls['result'] = 1
        controls['outputs'] = 'the procids is empty'
        return
    controls['percent'] = '++++'
    # Start every process on this machine.
    rparams = ['start']
    rparams.extend(procids)
    result, outputs = execute_remote_py(options, machine, rparams)
    if result != 0:
        actlog.log('remote start false !')
        actlog.log('---------------------------------------------------------------')
        for l in outputs.split('\n'):
            actlog.log(l)
        actlog.log('---------------------------------------------------------------')
        controls['done'] = 1
        controls['result'] = 2
        controls['percent'] = 'done'
        controls['outputs'] = outputs
        return
    # Collect the basic process info: the pypy pid per server id.
    thread_info = tyssh.parse_remote_datas_json(outputs, 'TY_THREAD_INFO')
    pypypids = {}
    for k, v in thread_info.items():
        pypypids[k] = v['pypy']['pid']
        actlog.log(machine['host'], 'sid=', k, 'pid=', pypypids[k])
    # Poll the remote hot-command status until every process reports.
    rparams = ['status']
    rparams.extend(procids)
    wst = time.time()
    # BUGFIX: errpids must exist before the loop - the timeout branch below
    # references it, and it raised NameError when every status query failed.
    errpids = []
    while 1:
        isdone = 0
        result, outputs = execute_remote_py(options, machine, rparams)
        if result != 0:
            actlog.log('read remote status false retry !')
            actlog.log('---------------------------------------------------------------')
            for l in outputs.split('\n'):
                actlog.log(l)
            actlog.log('---------------------------------------------------------------')
        else:
            try:
                # Parse the per-process status block from the remote output.
                thread_status = tyssh.parse_remote_datas_json(outputs, 'TY_THREAD_STATUS')
                ecount = 0.0
                scount = 0.0
                wcount = 0.0
                errpids = []
                for sid in pypypids:
                    status = thread_status.get(sid, {})
                    if pypypids[sid] != status.get('pid', pypypids[sid]):
                        # pid changed -> the process died after starting.
                        ecount += 1
                        errpids.append(sid)
                    else:
                        st = status.get('status', 0)
                        if st == 500:
                            ecount += 1
                            errpids.append(sid)
                        elif st == 200:
                            scount += 1
                        else:
                            wcount += 1
                controls['percent'] = str(int(scount * 100.0 / float(len(pypypids)))) + '%'
                if int(scount) + int(ecount) == len(pypypids):
                    # Every process reached a final state.
                    if int(ecount) > 0:
                        # At least one process failed.
                        controls['done'] = 1
                        controls['result'] = 2
                        controls['percent'] = 'done'
                        controls['outputs'] = 'remote process has Exception !'
                        for sid in errpids:
                            actlog.log(machine['host'], 'EXCEPTION !! ->', sid)
                        return
                    else:
                        # All processes are up.
                        isdone = 1
                        break
            except:
                actlog.error()
        if isdone == 1:
            break
        elif time.time() - wst > 300:
            # 5 minute startup timeout.
            actlog.log(machine['host'], 'time out !!!')
            actlog.log('remote status timeout false !' + str(errpids))
            controls['done'] = 1
            controls['result'] = 2
            controls['percent'] = 'done'
            controls['outputs'] = 'remote status timeout false !' + str(errpids)
            return
        time.sleep(1)
    controls['done'] = 1
    controls['result'] = 1
    controls['outputs'] = outputs
def _remake_static_zip(options, jdatas, alldata):
    """Repack static.json into an md5-named zip under webroot/static_file and
    register the new md5 in the upgrade_client_static configure entry.

    Returns 1 on success, 0 when the CDN upload shell fails.
    """
    webroot = options.env['webroot_path']
    zfiletmp = webroot + '/static_file/static.zip'
    zipf = zipfile.ZipFile(zfiletmp, mode="w", compression=zipfile.ZIP_DEFLATED)
    # A fixed date_time keeps the archive bytes stable, so the md5 changes
    # only when the json content changes.
    zinfo = ZipInfo(filename='static.json', date_time=(2015, 12, 16, 0, 0, 0))
    jdatas = json.dumps(jdatas, sort_keys=True, indent=4, separators=(', ', ' : '))
    zipf.writestr(zinfo, jdatas, zipfile.ZIP_DEFLATED)
    zipf.close()
    zdata = fsutils.readFile(zfiletmp)
    md5s = strutil.md5digest(zdata)
    zfile = webroot + '/static_file/' + md5s + '.zip'
    os.rename(zfiletmp, zfile)
    conf = alldata.get('game:9999:upgrade_client_static:0', None)
    if conf != None:
        conf['static_file_md5'] = md5s
        ulist = conf['static_file_url']
        if options.pokerdict.get('mode') != 1:
            # Non-online modes advertise the local http_game as download url;
            # test-like modes (mode > 1) get it with the highest priority.
            http_game = options.pokerdict['http_game']
            myurl = http_game + '/static_file'
            if myurl not in ulist:
                if options.pokerdict['mode'] > 1:
                    ulist.insert(0, myurl)
                else:
                    ulist.append(myurl)
            actlog.log('THE STATIC JSON CHANGED !! ', myurl + '/' + md5s + '.zip')
    actlog.log('THE STATIC JSON CHANGED !! ', zfile)
    # push_static tells the caller whether the zip was uploaded to the CDN.
    setattr(options, 'push_static', 0)
    if fsutils.fileExists('./cdn/copy_static_file.sh'):
        setattr(options, 'push_static', 1)
        actlog.log('UPLOAD ZIP TO CDN !!')
        st, out = commands.getstatusoutput('./cdn/copy_static_file.sh')
        if st != 0:
            actlog.log('UPLOAD ZIP TO CDN ERROR !!')
            actlog.log(out)
            return 0
        else:
            actlog.log('UPLOAD ZIP TO CDN OK !!', out)
    else:
        actlog.log('UPLOAD ZIP TO CDN THE SHELL NOT FOUND !')
    return 1
def _update_redis_file_by_diff(redisaddr, newdatas):
    """Push only the differences between ``newdatas`` and what redis holds.

    Returns (changekeys, changelen): the list of keys that were deleted or
    rewritten, and an accumulated change size counter.
    """
    oldkeys, olddatas = _load_old_datas(redisaddr)
    # Keys present in redis but absent from the configure files are to be
    # deleted; ':map.' keys are externally synced and are never removed.
    delkeys = set(oldkeys) - set(newdatas.keys())
    for key in delkeys:
        if key.find(':map.') > 0:
            continue
        actlog.log('del->', key)
    # Collect keys whose serialized value differs from the stored one.
    updatas = {}
    for key, value in newdatas.items():
        if not isinstance(value, (str, unicode)):
            value = strutil.dumps(value)
        if value != olddatas.get(key, None):
            updatas[key] = value
    actlog.log('redis update begin !!')
    rpipe = tydb.get_redis_pipe(redisaddr)
    changelen = 0
    for key in delkeys:
        if key.find(':map.') > 0:
            continue
        rpipe.delete(key)
        changelen += 1
    for key, value in updatas.items():
        rpipe.set(key, value)
        changelen += len(value)
    rpipe.execute()
    actlog.log('redis update done !!')
    # Report every touched key (deletions include the skipped map keys).
    changekeys = list(set(delkeys) | set(updatas.keys()))
    actlog.log('TY_REDIS_CHANGE_LIST=', strutil.dumps(changekeys))
    actlog.log('+++++++++++++++++++++')
    for key in changekeys:
        actlog.log('CHANGED', key)
    actlog.log('+++++++++++++++++++++')
    return changekeys, changelen
def main():
    """Command line entry point: validate arguments and dispatch the action."""
    from tyserver.tyutils import fsutils
    # Mirror the action log to stdout for interactive use.
    actlog._with_std = 1
    options = parse_cmd_lines()
    if not hasattr(options, 'pokerpath') :
        useage()
        actlog.log('You must input -m <poker path>')
        return
    if not hasattr(options, 'action') :
        useage()
        actlog.log('You Must input -a <action>')
        return
    pokerpath = options.pokerpath
    if not pokerpath :
        actlog.log("must specified the service entrance path")
        useage()
        return None
    # Normalize the entrance path to an absolute path and re-check it.
    pokerpath = fsutils.makeAsbpath(options.pokerpath)
    if not fsutils.dirExists(pokerpath) :
        actlog.log("the service entrance path not exists [" + pokerpath + ']')
        useage()
        return None
    setattr(options, 'pokerpath', pokerpath)
    workpath = fsutils.getParentDir(__file__, 1)
    setattr(options, 'workpath', workpath)
    # Optional file logging, controlled by the LOGDIR environment variable.
    fpath = os.environ.get('LOGDIR', '')
    if fpath and fsutils.dirExists(fpath):
        fpath = fsutils.abspath(fpath)
        setattr(options, 'logpath', fpath)
        ct = datetime.now().strftime('%Y%m%d%H%M%S')
        actlog.open_act_log(options, {'uuid':ct})
    else:
        setattr(options, 'logpath', None)
    actlog.log("pokerpath =", options.pokerpath)
    actlog.log("workpath =", options.workpath)
    actlog.log("logpath =", options.logpath)
    # NOTE(review): fpath may be '' here when LOGDIR is unset - confirm
    # appendPath handles an empty base path as intended.
    flogfile = fsutils.appendPath(fpath, 'webmagr.log')
    setattr(options, 'logfile', flogfile)
    actlog.log("action =", options.action)
    # Dispatch: each action is wrapped into a thread_do_action request.
    if options.action == 'config_check' :
        action = {'action' : 'config_check', 'params':{}}
        thread_do_action(options, action)
        return
    if options.action == 'config_reload' :
        action = {'action' : 'config_reload', 'params':{}}
        thread_do_action(options, action)
        return
    if options.action == 'config_update' :
        keys = []
        if hasattr(options, 'keys') :
            keys = options.keys.split(',')
        action = {'action' : 'config_update', 'params':{'keys' : keys}}
        thread_do_action(options, action)
        return
    if options.action == 'config_reset' :
        action = {'action' : 'config_update', 'params':{'reset' : 1}}
        thread_do_action(options, action)
        return
    if options.action == 'config_status' :
        action = {'action' : 'config_status', 'params':{}}
        thread_do_action(options, action)
        return
    if options.action == 'compile' :
        action = {'action' : 'compile_source', 'params':{}}
        thread_do_action(options, action)
        return
    if options.action == 'start' :
        action = {'action' : 'config_compile_start', 'params':{}}
        thread_do_action(options, action)
        return
    if options.action == 'reset' :
        # reset requires an explicit server id list (-sid).
        if not hasattr(options, 'serverIds') :
            useage()
            actlog.log('You must input -sid XXX,XXX')
            return
        processids = options.serverIds.split(',')
        if not processids :
            useage()
            actlog.log('You must input -sid XXX,XXX')
            return
        action = {'action' : 'reset', 'params':{'processids' : processids}}
        thread_do_action(options, action)
        return
    if options.action == 'stop' :
        action = {'action' : 'stop_all_process', 'params':{}}
        thread_do_action(options, action)
        return
    if options.action == 'push_code' :
        action = {'action' : 'push_bin', 'params':{}}
        thread_do_action(options, action)
        return
    if options.action == 'push_web' :
        action = {'action' : 'push_web', 'params':{}}
        thread_do_action(options, action)
        return
    # 'hotfix_nowait' is the non-blocking variant of 'hotfix'.
    hotfixwait = 1
    if options.action == 'hotfix_nowait' :
        options.action = 'hotfix'
        hotfixwait = 0
    setattr(options, 'hotfixwait', hotfixwait)
    if options.action == 'hotfix' :
        if not hasattr(options, 'serverIds') :
            useage()
            actlog.log('You must input -sid XXX,XXX or -sid all')
            return
        if not hasattr(options, 'hotfixpy') :
            useage()
            actlog.log('You Must input -py xxxx.py')
            return
        hotfix.action(options)
        return
    if options.action == 'rm_logs' :
        action = {'action' : 'remove_all_logs', 'params':{}}
        thread_do_action(options, action)
        return
    actlog.log('unknow action of :', options.action)
    useage()
def __load_poker_file(options):
    """Load and validate the top-level poker configure file.

    Checks every mandatory field (id, name, mode, ports, paths, urls,
    config_redis) and normalizes the values in place.  Returns the
    validated pokerdict, or None/0 (via actlog.error) on any failure.
    """
    pokerfile = options.pokerfile
    actlog.log('LOAD POKER FILE :', pokerfile)
    pokerdict = fsutils.readJsonFile(pokerfile, True)
    # NOTE(review): this assignment happens before the dict-type check below;
    # if readJsonFile returns a non-dict this raises instead of reporting the
    # friendly format error - confirm readJsonFile's failure contract.
    pokerdict['poker_path'] = options.poker_path
    if not isinstance(pokerdict, dict):
        return actlog.error('POKER FILE : format error,must be a dict')
    gameId = pokerdict.get('id', None)
    if not isinstance(gameId, int) or gameId <= 0 or gameId >= 10000:
        return actlog.error('POKER FILE : id value is wrong')
    gameName = pokerdict.get('name', None)
    if not isinstance(gameName, (str, unicode)) or len(gameName) <= 0:
        return actlog.error('POKER FILE : name value is wrong')
    # '-' is reserved in derived identifiers, so it may not appear in name.
    if gameName.find('-') >= 0:
        return actlog.error(
            'POKER FILE : name value is wrong, can not have "-" (reserved char)'
        )
    corporation = pokerdict.get('corporation', 'tuyoo')
    if corporation not in ('tuyoo', 'momo'):
        return actlog.error(
            'POKER FILE : corporation value wrong, choice: tuyoo or momo')
    pokerdict['corporation'] = corporation
    mode = pokerdict.get('mode', 0)
    if mode not in (1, 2, 3, 4):
        return actlog.error(
            'POKER FILE : mode value must be an integer, choice : 1(online) or 2(simulation) or 3(rich test) or 4(tiny test)'
        )
    port_redis = pokerdict.get('port_redis', 0)
    if not actutils.check_port(port_redis, True):
        return actlog.error(
            'POKER FILE : port_redis, socket port number wrong ' +
            str(port_redis))
    pokerdict['port_redis'] = port_redis
    local_internet = pokerdict.get('local_internet', '')
    if not isinstance(local_internet, (str, unicode)):
        return actlog.error('POKER FILE : local_internet value is wrong')
    pokerdict['local_internet'] = local_internet
    local_intranet = __get_value(pokerdict, 'local_intranet', None)
    if not isinstance(local_intranet, (str, unicode)):
        return actlog.error('POKER FILE : local_intranet value is wrong')
    pokerdict['local_intranet'] = local_intranet
    port_http = int(__get_value(pokerdict, 'port_http', 0))
    if not actutils.check_port(port_http, True):
        return actlog.error(
            'POKER FILE : port_http, socket port number wrong ' +
            str(port_http))
    pokerdict['port_http'] = port_http
    # Relative paths are resolved against the location of the poker file.
    projects_path = __get_value(pokerdict, 'projects_path', None)
    if not isinstance(projects_path, str):
        return actlog.error(
            'POKER FILE : projects_path wrong, must pointing to the projects path'
        )
    projects_path = fsutils.makeAsbpath(projects_path, pokerfile)
    if not fsutils.dirExists(projects_path):
        return actlog.error(
            'POKER FILE : projects_path, the path not exists [' +
            projects_path + ']')
    pokerdict['projects_path'] = projects_path
    output_path = __get_value(pokerdict, 'output_path', None)
    if not isinstance(output_path, str):
        return actlog.error(
            'POKER FILE : output_path wrong, must pointing to the compile output path'
        )
    output_path = fsutils.makeAsbpath(output_path, pokerfile)
    if not fsutils.dirExists(output_path):
        return actlog.error(
            'POKER FILE : output_path wrong, the path not exists [' +
            output_path + ']')
    pokerdict['output_path'] = output_path
    # The http urls: http_sdk is mandatory, the others are optional.
    http = __get_value(pokerdict, 'http_sdk', '')
    if not actutils.check_http_url(http, 'POKER FILE : http_sdk'):
        return 0
    pokerdict['http_sdk'] = http
    http = __get_value(pokerdict, 'http_sdk_inner', None)
    if http != None and not actutils.check_http_url(
            http, 'POKER FILE : http_sdk_inner'):
        return 0
    pokerdict['http_sdk_inner'] = http
    http = __get_value(pokerdict, 'http_game', None)
    if http != None and not actutils.check_http_url(http, 'POKER FILE : http_game'):
        return 0
    pokerdict['http_game'] = http
    http = __get_value(pokerdict, 'http_download', None)
    if http != None and not actutils.check_http_url(
            http, 'POKER FILE : http_download'):
        return 0
    pokerdict['http_download'] = http
    http = __get_value(pokerdict, 'http_gdss', None)
    if http != None and not actutils.check_http_url(http, 'POKER FILE : http_gdss'):
        return 0
    pokerdict['http_gdss'] = http
    # config_redis must have the form <ip>:<port>:<dbid>.
    config_redis = __get_value(pokerdict, 'config_redis', None)
    if not isinstance(config_redis, str):
        return actlog.error(
            'POKER FILE : config_redis wrong, must pointing to the configure redis <ip>:<port>:<dbid>'
        )
    confdb = config_redis.split(':')
    if len(confdb) != 3:
        return actlog.error(
            'POKER FILE : config_redis wrong, must pointing to the configure redis <ip>:<port>:<dbid>'
        )
    pokerdict['config_redis'] = config_redis
    return pokerdict
def _remake_static_zip(options, jdatas, alldata):
    """Repack static.json into an md5-named zip under webroot/static_file,
    register the md5 in the upgrade_client_static entry and upload to CDN.

    Returns 1 on success, 0 when the CDN upload shell fails.
    """
    webroot = options.env['webroot_path']
    tmpzip = webroot + '/static_file/static.zip'
    # A fixed timestamp keeps the archive bytes reproducible, so the md5
    # only changes when the json content changes.
    archive = zipfile.ZipFile(tmpzip, mode="w", compression=zipfile.ZIP_DEFLATED)
    entry = ZipInfo(filename='static.json', date_time=(2015, 12, 16, 0, 0, 0))
    serialized = json.dumps(jdatas, sort_keys=True, indent=4, separators=(', ', ' : '))
    archive.writestr(entry, serialized, zipfile.ZIP_DEFLATED)
    archive.close()
    md5s = strutil.md5digest(fsutils.readFile(tmpzip))
    zfile = webroot + '/static_file/' + md5s + '.zip'
    os.rename(tmpzip, zfile)
    conf = alldata.get('game:9999:upgrade_client_static:0', None)
    if conf is not None:
        conf['static_file_md5'] = md5s
        ulist = conf['static_file_url']
        if options.pokerdict.get('mode') != 1:
            # Non-online modes serve the zip from the local http_game host,
            # test-like modes (mode > 1) with the highest priority.
            myurl = options.pokerdict['http_game'] + '/static_file'
            if myurl not in ulist:
                if options.pokerdict['mode'] > 1:
                    ulist.insert(0, myurl)
                else:
                    ulist.append(myurl)
            actlog.log('THE STATIC JSON CHANGED !! ', myurl + '/' + md5s + '.zip')
    actlog.log('THE STATIC JSON CHANGED !! ', zfile)
    # push_static records whether the zip was uploaded to the CDN.
    setattr(options, 'push_static', 0)
    if not fsutils.fileExists('./cdn/copy_static_file.sh'):
        actlog.log('UPLOAD ZIP TO CDN THE SHELL NOT FOUND !')
        return 1
    setattr(options, 'push_static', 1)
    actlog.log('UPLOAD ZIP TO CDN !!')
    st, out = commands.getstatusoutput('./cdn/copy_static_file.sh')
    if st != 0:
        actlog.log('UPLOAD ZIP TO CDN ERROR !!')
        actlog.log(out)
        return 0
    actlog.log('UPLOAD ZIP TO CDN OK !!', out)
    return 1
def _thread_action_start(controls):
    """Worker-thread entry: start one machine's processes and poll until done.

    ``controls`` is a shared dict the caller monitors; this routine fills in
    'done' (1 when finished), 'result' (1 ok / 2 failed), 'percent' and
    'outputs'.
    """
    params = controls['params']
    machine = controls['machine']
    options = params['options']
    pfilter = params['thread_filter']
    procids = getMachinePids(options, machine, pfilter)
    if not procids:
        controls['done'] = 1
        controls['result'] = 1
        controls['outputs'] = 'the procids is empty'
        return
    controls['percent'] = '++++'
    # Start every process on this machine.
    rparams = ['start']
    rparams.extend(procids)
    result, outputs = execute_remote_py(options, machine, rparams)
    if result != 0:
        actlog.log('remote start false !')
        actlog.log(
            '---------------------------------------------------------------')
        for l in outputs.split('\n'):
            actlog.log(l)
        actlog.log(
            '---------------------------------------------------------------')
        controls['done'] = 1
        controls['result'] = 2
        controls['percent'] = 'done'
        controls['outputs'] = outputs
        return
    # Collect the basic process info: the pypy pid per server id.
    thread_info = tyssh.parse_remote_datas_json(outputs, 'TY_THREAD_INFO')
    pypypids = {}
    for k, v in thread_info.items():
        pypypids[k] = v['pypy']['pid']
        actlog.log(machine['host'], 'sid=', k, 'pid=', pypypids[k])
    # Poll the remote hot-command status until every process reports.
    rparams = ['status']
    rparams.extend(procids)
    wst = time.time()
    # BUGFIX: errpids must exist before the loop - the timeout branch below
    # references it, and it raised NameError when every status query failed.
    errpids = []
    while 1:
        isdone = 0
        result, outputs = execute_remote_py(options, machine, rparams)
        if result != 0:
            actlog.log('read remote status false retry !')
            actlog.log(
                '---------------------------------------------------------------'
            )
            for l in outputs.split('\n'):
                actlog.log(l)
            actlog.log(
                '---------------------------------------------------------------'
            )
        else:
            try:
                # Parse the per-process status block from the remote output.
                thread_status = tyssh.parse_remote_datas_json(
                    outputs, 'TY_THREAD_STATUS')
                ecount = 0.0
                scount = 0.0
                wcount = 0.0
                errpids = []
                for sid in pypypids:
                    status = thread_status.get(sid, {})
                    if pypypids[sid] != status.get('pid', pypypids[sid]):
                        # pid changed -> the process died after starting.
                        ecount += 1
                        errpids.append(sid)
                    else:
                        st = status.get('status', 0)
                        if st == 500:
                            ecount += 1
                            errpids.append(sid)
                        elif st == 200:
                            scount += 1
                        else:
                            wcount += 1
                controls['percent'] = str(
                    int(scount * 100.0 / float(len(pypypids)))) + '%'
                if int(scount) + int(ecount) == len(pypypids):
                    # Every process reached a final state.
                    if int(ecount) > 0:
                        # At least one process failed.
                        controls['done'] = 1
                        controls['result'] = 2
                        controls['percent'] = 'done'
                        controls['outputs'] = 'remote process has Exception !'
                        for sid in errpids:
                            actlog.log(machine['host'], 'EXCEPTION !! ->', sid)
                        return
                    else:
                        # All processes are up.
                        isdone = 1
                        break
            except:
                actlog.error()
        if isdone == 1:
            break
        elif time.time() - wst > 300:
            # 5 minute startup timeout.
            actlog.log(machine['host'], 'time out !!!')
            actlog.log('remote status timeout false !' + str(errpids))
            controls['done'] = 1
            controls['result'] = 2
            controls['percent'] = 'done'
            controls['outputs'] = 'remote status timeout false !' + str(
                errpids)
            return
        time.sleep(1)
    controls['done'] = 1
    controls['result'] = 1
    controls['outputs'] = outputs
def printErrorRet(ret):
    """Log a remote reply; return 1 when it carries an "error" section, else 0."""
    if ret.find('"error"') < 0:
        # Plain success payload - just echo it.
        actlog.log(ret)
        return 0
    actlog.log('ERROR !!')
    try:
        errs = strutil.loads(ret)
        for sid in errs:
            actlog.log('============ EXCEPTIONS OF %s ============' % (sid))
            for line in errs[sid]['error'].split('\n'):
                if line != '':
                    actlog.log(line)
    except:
        # The reply is not the expected json shape - dump it verbatim.
        actlog.log('ERROR !!', ret)
    actlog.log('ERROR !!')
    return 1
def action(options, withlog=1):
    """Send a hotfix script to the selected servers via the manager http api.

    Returns 1 when withlog is set, otherwise the raw http reply string.
    """
    serverIds = options.serverIds
    hotfixpy = options.hotfixpy
    # hotfixpy is either inline code ('code://...') or a file path.
    if not hotfixpy.startswith('code://') :
        hotfixpy = 'file://' + hotfixpy
    httpgame = getattr(options, '_httpgame', None)
    if not httpgame :
        # Resolve the manager hotfix url once and cache it on options.
        datas = redisdata._getLastOkDefines(options)
        httpgame = datas['pokerdict']['http_game'] + '/_http_manager_hotfix'
        setattr(options, '_httpgame', httpgame)
    hotparams = getattr(options, 'hotparams', None)
    if not isinstance(hotparams, dict) :
        hotparams = {}
    if withlog :
        actlog.log("hotfixpy =", hotfixpy)
        actlog.log("hotfixwait =", options.hotfixwait)
        actlog.log("serverIds =", serverIds)
        actlog.log("httpgame =", httpgame)
        actlog.log('hotparams =', hotparams)
    result = tyhttp.dohttpquery(httpgame, {'hotfixpy' : hotfixpy,
                                           'wait' : options.hotfixwait,
                                           'serverIds' : serverIds,
                                           'hotparams' : strutil.dumps(hotparams)})
    if withlog :
        actlog.log('result =', result)
        return 1
    return result
def action(options):
    ''' Load and verify the service startup configuration files. '''
    alldata = {}
    setattr(options, 'alldata', alldata)
    actlog.log('options.poker_path->', options.poker_path)
    checkprojects = options.checkprojects
    loadsprojs = set()
    # Load the shared game configure data, then each checked project's rooms.
    datas, allGameIds = load_project_datas_all(options.poker_path + '/game/', options)
    alldata.update(datas)
    for cproj in checkprojects:
        projectdir = fsutils.appendPath(options.poker_path, cproj)
        loadsprojs.add(projectdir)
        datas = load_project_datas_room(projectdir, options)
        alldata.update(datas)
    allGameIds.sort()
    options.pokerdict['config.game.ids'] = allGameIds
    actlog.log('config.game.ids->', allGameIds)
    # Static id mapping tables shipped with the poker package.
    alldata['poker:map.activityid'] = readJsonData(
        options.poker_path + '/poker/map.activityid.json', options)
    alldata['poker:map.bieventid'] = readJsonData(
        options.poker_path + '/poker/map.bieventid.json', options)
    alldata['poker:map.giftid'] = readJsonData(
        options.poker_path + '/poker/map.giftid.json', options)
    # Fetch the client/product id mappings from the gdss service.
    ret = _syncDataFromGdss(options, 'poker:map.clientid', dict, 'getClientIdDict')
    if not ret:
        return ret
    ret = _syncDataFromGdss(options, 'poker:map.productid', dict, 'getProductIdDict')
    if not ret:
        return ret
    ret = make_static_json.make_static_json_file(options, alldata)
    if not ret:
        return ret
    # Write the compiled configure outputs.
    outpath = options.pokerdict['output_path']
    fsutils.writeFile(outpath, 'out.redis.json', alldata)
    fsutils.writeFile(outpath, 'out.poker.global.json', options.pokerdict)
    fsutils.writeFile(options.poker_path, '._confdata_.json', alldata)
    # An optional patch_config.py hook may post-process the output.
    patchpy = os.path.dirname(outpath) + '/patch_config.py'
    if os.path.isfile(patchpy):
        cmd = 'pypy ' + patchpy + ' ' + outpath
        actlog.log('执行游戏配置文件补丁:', cmd)
        status, output = commands.getstatusoutput(cmd)
        for l in output.split('\n'):
            actlog.log(l)
        if status != 0:
            actlog.error('游戏配置文件补丁失败:', patchpy)
            actlog.error(status, output)
            return 0
        # When the patch asks for it, rebuild the static json from the
        # patched output file.
        if output and output.find('REMAKE_STATIC') >= 0:
            actlog.log('find REMAKE_STATIC !')
            alldata = json.loads(filterComment(outpath + '/out.redis.json'))
            ret = make_static_json.make_static_json_file(options, alldata)
            if not ret:
                return ret
            fsutils.writeFile(outpath, 'out.redis.json', alldata)
            fsutils.writeFile(options.poker_path, '._confdata_.json', alldata)
    return 1
def action(options, extkeys=None):
    """Publish the changed configure keys and wait until every server syncs.

    Pushes each changed key (plus a timestamp marker) onto the redis change
    list, then polls config_status until all servers report the newest
    configure index.  Returns 1 on success, 0 on error.

    extkeys : optional extra keys to publish in addition to
              options.changekeys.  (Was a mutable default ``[]``; ``None``
              avoids the shared-default pitfall and is backward compatible
              because the old code only read from it.)
    """
    sleepTime = 0.001
    changekeys = options.changekeys
    serverlist = options.serverlist
    changelen = options.changelen
    if extkeys:
        changekeys.extend(extkeys)
    # A reset invalidates everything at once.
    if options.reset:
        changekeys = ['all']
    actlog.log('changekeys=', changekeys)
    actlog.log('changelen =', changelen)
    actlog.log('sleep time=', sleepTime)
    config_redis = options.pokerdict['config_redis']
    rconn = tydb.get_redis_conn(config_redis)
    _CHANGE_KEYS_NAME = 'configitems.changekey.list'
    # A timestamp marker followed by every changed key; servers compare
    # their own index against this list's length.
    rconn.rpush(_CHANGE_KEYS_NAME, 'CHANG_TIME %s' % (datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f')))
    for ckey in changekeys:
        rconn.rpush(_CHANGE_KEYS_NAME, ckey)
    actlog.log('Configure update wait :', 0, '/', len(serverlist))
    # Poll until every server has caught up with the published changes.
    while 1:
        okcounts, allcounts = config_status.action(options, 1)
        if allcounts <= 0:
            # An error occurred while querying the servers.
            actlog.log('Configure update ERROR !!')
            return 0
        elif okcounts == allcounts:
            # Every server is in sync.
            actlog.log('Configure update OK :', okcounts, '/', allcounts)
            return 1
        else:
            actlog.log('Configure update WAIT :', okcounts, '/', allcounts)
        time.sleep(2)
    return 1
def action(options):
    """Pre-compile every .py file to .pyc so syntax errors surface early.

    Also builds the native .so extensions by running the project's makeso
    scripts.  Returns 1 on success, 0 on any compile failure.
    """
    if os.environ.get('RUN_IN_DOCKER', 0):
        # In development docker mode webroot is a link, no copy is needed;
        # only the native .so build runs and the pyc step is skipped.
        if not makeSoInDocker(options):
            return 0
        actlog.log('docker mode skip compiler pyc !')
        return 1
    # Build the .so files (platform specific: LINUX / WIN32 / Darwin).
    bin_path = options.env['bin_path']
    if platform.system() == 'Darwin':
        # On macOS the two cffi build scripts are hard-coded.
        makesosh = [
            os.path.join('freetime', 'core', 'cffi', 'makeso.sh'),
            os.path.join('poker', 'util', 'cffi', 'makeso.sh'),
        ]
    elif platform.system() == 'Windows':
        _, makesosh = fsutils.findTreeFiles(
            bin_path, ['.*' + os.path.sep + '(makeso.cmd)$'], ['.*\\.svn.*'])
    else:
        _, makesosh = fsutils.findTreeFiles(
            bin_path, ['.*' + os.path.sep + '(makeso.sh)$'], ['.*\\.svn.*'])
    if len(makesosh) == 0:
        actlog.log('run C/C++ compiler : not found !!')
    for msh in makesosh:
        cmd = bin_path + os.path.sep + msh
        # Ensure the build script is executable before running it.
        os.chmod(cmd, stat.S_IRWXU | stat.S_IRWXG)
        actlog.log('run C/C++ compiler :', msh)
        status, output = commands.getstatusoutput(cmd)
        if status != 0:
            actlog.log('C/C++ compiler ERROR !!', cmd)
            actlog.log(output)
            return 0
    # Pre-compilation is only performed on a Linux runtime.
    if platform.system() != 'Linux':
        return 1
    # Generate a throw-away module that imports every project module
    # (hotfix scripts excluded); importing compiles each file and makes
    # any syntax error fail the build.
    pkgs = []
    pyfiles = options._pyfiles
    for pyf in pyfiles:
        if pyf.endswith('.py') and pyf.find('hotfix') < 0:
            pkg = ' import ' + '.'.join(pyf.split(os.path.sep)[1:])
            pkg = pkg[0:-3]  # strip the trailing '.py'
            if pkg.endswith('__init__'):
                pkg = pkg[0:-9]  # strip '.__init__' -> import the package itself
            pkgs.append(pkg)
    content = '''
# -*- coding: utf-8 -*-
# author time : %s
import sys
from twisted.internet import reactor # 确保reactor第一时间初始化, 否则可能莫名其妙的crash
if sys.getdefaultencoding().lower() != 'utf-8' :
 reload(sys)
 sys.setdefaultencoding("utf-8")
try:
%s
except:
 print sys.path
 raise
''' % (str(datetime.now()), '\n'.join(pkgs))
    cfilepath = bin_path + os.path.sep + '_compiler.py'
    fsutils.writeFile('', cfilepath, content)
    actlog.log('run PYPY compiler :', cfilepath)
    pypy = strutil.getEnv('PYPY', 'pypy')
    # -tt makes the interpreter strict about mixed tab/space indentation.
    cmd = '%s -tt %s' % (pypy, cfilepath)
    status, output = commands.getstatusoutput(cmd)
    if status != 0:
        actlog.log('ERROR !!', 'compile py files false !', status, cfilepath)
        lines = output.split('\n')
        for line in lines:
            actlog.log(line)
        return 0
    else:
        # fsutils.deleteFile(cfilepath)
        # Remove generated .pyc files so only sources remain in bin_path.
        for pyf in pyfiles:
            if pyf.endswith('.py'):
                f = bin_path + os.path.sep + pyf + 'c'
                if fsutils.fileExists(f):
                    fsutils.deleteFile(f)
    return 1
def action(options):
    '''Load and validate the service startup configuration files.

    Aggregates per-project data, ID-mapping JSON files and gdss-synced
    dictionaries into options.alldata, generates static JSON, and writes
    the combined configuration out to the output path. Optionally runs a
    patch_config.py hook which may trigger a static-JSON regeneration.

    :param options: parsed command options; reads options.poker_path,
                    options.checkprojects and options.pokerdict.
    :return: 1 on success, 0 (or a falsy helper result) on failure.
    '''
    alldata = {}
    # Expose the accumulating config dict to later pipeline steps.
    setattr(options, 'alldata', alldata)
    actlog.log('options.poker_path->', options.poker_path)
    checkprojects = options.checkprojects
    loadsprojs = set()
    datas, allGameIds = load_project_datas_all(options.poker_path + '/game/', options)
    alldata.update(datas)
    for cproj in checkprojects:
        projectdir = fsutils.appendPath(options.poker_path, cproj)
        loadsprojs.add(projectdir)
        datas = load_project_datas_room(projectdir, options)
        alldata.update(datas)
    allGameIds.sort()
    options.pokerdict['config.game.ids'] = allGameIds
    actlog.log('config.game.ids->', allGameIds)
    alldata['poker:map.activityid'] = readJsonData(options.poker_path + '/poker/map.activityid.json', options)
    alldata['poker:map.bieventid'] = readJsonData(options.poker_path + '/poker/map.bieventid.json', options)
    alldata['poker:map.giftid'] = readJsonData(options.poker_path + '/poker/map.giftid.json', options)
    # Fetch the client/product ID mapping data from gdss.
    ret = _syncDataFromGdss(options, 'poker:map.clientid', dict, 'getClientIdDict')
    if not ret:
        return ret
    ret = _syncDataFromGdss(options, 'poker:map.productid', dict, 'getProductIdDict')
    if not ret:
        return ret
    ret = make_static_json.make_static_json_file(options, alldata)
    if not ret:
        return ret
    outpath = options.pokerdict['output_path']
    fsutils.writeFile(outpath, 'out.redis.json', alldata)
    fsutils.writeFile(outpath, 'out.poker.global.json', options.pokerdict)
    fsutils.writeFile(options.poker_path, '._confdata_.json', alldata)
    # Optional post-processing hook: a patch script living next to the
    # output directory, run under pypy with the output path as argv[1].
    patchpy = os.path.dirname(outpath) + '/patch_config.py'
    if os.path.isfile(patchpy):
        cmd = 'pypy ' + patchpy + ' ' + outpath
        actlog.log('执行游戏配置文件补丁:', cmd)
        status, output = commands.getstatusoutput(cmd)
        for l in output.split('\n'):
            actlog.log(l)
        if status != 0:
            actlog.error('游戏配置文件补丁失败:', patchpy)
            actlog.error(status, output)
            return 0
        # The patch script signals via its stdout that the static JSON
        # must be rebuilt from the (possibly patched) out.redis.json.
        if output and output.find('REMAKE_STATIC') >= 0:
            actlog.log('find REMAKE_STATIC !')
            # NOTE(review): filterComment appears to be given a file path
            # here, not file contents — presumably it reads the file
            # itself; verify against its definition.
            alldata = json.loads(filterComment(outpath + '/out.redis.json'))
            ret = make_static_json.make_static_json_file(options, alldata)
            if not ret:
                return ret
            fsutils.writeFile(outpath, 'out.redis.json', alldata)
            fsutils.writeFile(options.poker_path, '._confdata_.json', alldata)
    return 1
def main():
    '''Command-line entry point: validate arguments, set up paths and
    logging on the options object, then dispatch the requested action.
    '''
    from tyserver.tyutils import fsutils
    actlog._with_std = 1
    options = parse_cmd_lines()
    # --- mandatory arguments -------------------------------------------
    if not hasattr(options, 'pokerpath'):
        useage()
        actlog.log('You must input -m <poker path>')
        return
    if not hasattr(options, 'action'):
        useage()
        actlog.log('You Must input -a <action>')
        return
    entrance = options.pokerpath
    if not entrance:
        actlog.log("must specified the service entrance path")
        useage()
        return None
    entrance = fsutils.makeAsbpath(options.pokerpath)
    if not fsutils.dirExists(entrance):
        actlog.log("the service entrance path not exists [" + entrance + ']')
        useage()
        return None
    setattr(options, 'pokerpath', entrance)
    setattr(options, 'workpath', fsutils.getParentDir(__file__, 1))
    # --- logging setup: only when LOGDIR points at an existing dir ------
    logdir = os.environ.get('LOGDIR', '')
    if logdir and fsutils.dirExists(logdir):
        logdir = fsutils.abspath(logdir)
        setattr(options, 'logpath', logdir)
        stamp = datetime.now().strftime('%Y%m%d%H%M%S')
        actlog.open_act_log(options, {'uuid': stamp})
    else:
        setattr(options, 'logpath', None)
    actlog.log("pokerpath =", options.pokerpath)
    actlog.log("workpath =", options.workpath)
    actlog.log("logpath =", options.logpath)
    setattr(options, 'logfile', fsutils.appendPath(logdir, 'webmagr.log'))
    actlog.log("action =", options.action)
    act = options.action
    # --- simple pass-through actions: cli name -> remote action name ---
    simple_actions = {
        'config_check': 'config_check',
        'config_reload': 'config_reload',
        'config_status': 'config_status',
        'compile': 'compile_source',
        'start': 'config_compile_start',
        'stop': 'stop_all_process',
        'push_code': 'push_bin',
        'push_web': 'push_web',
    }
    if act in simple_actions:
        thread_do_action(options, {'action': simple_actions[act], 'params': {}})
        return
    # --- actions needing extra parameters ------------------------------
    if act == 'config_update':
        keys = options.keys.split(',') if hasattr(options, 'keys') else []
        thread_do_action(options, {'action': 'config_update', 'params': {'keys': keys}})
        return
    if act == 'config_reset':
        # A reset is a config_update with the reset flag raised.
        thread_do_action(options, {'action': 'config_update', 'params': {'reset': 1}})
        return
    if act == 'reset':
        if not hasattr(options, 'serverIds'):
            useage()
            actlog.log('You must input -sid XXX,XXX')
            return
        processids = options.serverIds.split(',')
        if not processids:
            useage()
            actlog.log('You must input -sid XXX,XXX')
            return
        thread_do_action(options, {'action': 'reset', 'params': {'processids': processids}})
        return
    # --- hotfix: 'hotfix_nowait' is 'hotfix' without waiting -----------
    hotfixwait = 1
    if act == 'hotfix_nowait':
        options.action = 'hotfix'
        act = 'hotfix'
        hotfixwait = 0
    setattr(options, 'hotfixwait', hotfixwait)
    if act == 'hotfix':
        if not hasattr(options, 'serverIds'):
            useage()
            actlog.log('You must input -sid XXX,XXX or -sid all')
            return
        if not hasattr(options, 'hotfixpy'):
            useage()
            actlog.log('You Must input -py xxxx.py')
            return
        hotfix.action(options)
        return
    if act == 'rm_logs':
        thread_do_action(options, {'action': 'remove_all_logs', 'params': {}})
        return
    actlog.log('unknow action of :', options.action)
    useage()