def __load_project_file(options):
    """Load and validate poker/project.json.

    For each project entry, verifies the 'path' exists, then scans its
    src/ directory for game packages (subdirectories containing game.py)
    and collects their gameIds.

    Returns:
        (datadict, game_packages, gameids) on success.
        (None, None, None) on any validation failure, so callers that
        unpack three values (see the config-loading action) do not crash.
        The original code returned the single falsy result of
        actlog.error(...) here, which broke tuple unpacking at the caller.
    """
    jname = 'poker/project.json'
    datadict = __load_json_file(options, jname, list)
    if not datadict:
        # missing or empty file — still return an unpackable 3-tuple
        return None, None, None
    game_packages = []
    gameids = []
    for proj in datadict:
        ppath = proj.get('path', None)
        if not isinstance(ppath, str):
            actlog.error('the project path is not defined !', proj)
            return None, None, None
        if not fsutils.dirExists(ppath):
            actlog.error('the project path not found !', ppath)
            return None, None, None
        srcpath = fsutils.appendPath(ppath, 'src')
        if fsutils.dirExists(srcpath):
            subdirs = os.listdir(srcpath)
            for subdir in subdirs:
                gamepy = fsutils.appendPath(srcpath, subdir, 'game.py')
                if fsutils.fileExists(gamepy):
                    # a package directory may be claimed by one project only
                    if subdir in game_packages:
                        actlog.error('the project package is double defined !',
                                     gamepy, 'proj=', proj)
                        return None, None, None
                    game_packages.append(subdir)
                    proj['package'] = subdir
                    gameid = proj.get('gameId', 0)
                    if gameid > 0:
                        gameids.append(int(gameid))
                    actlog.log('find gam project-> GAMEID=', gameid,
                               'PACKAGE=', subdir)
    return datadict, game_packages, gameids
def action(options, params=None):
    """Run `svn up` over every project checkout under projects_path.

    params may carry 'svnuser'/'svnpwd'; when both are set they are passed
    to svn together with --no-auth-cache.

    Returns 1 unconditionally (best-effort: svn failures are only logged).
    """
    # fixed: `params={}` was a shared mutable default argument
    if params is None:
        params = {}
    svnuser = params.get('svnuser', None)
    svnpwd = params.get('svnpwd', None)
    svnau = ''
    if svnuser and svnpwd:
        # NOTE(review): credentials appear on the command line and are
        # visible in the process list — confirm this is acceptable here
        svnau = ' --username ' + str(svnuser) + ' --password ' + str(svnpwd) + ' --no-auth-cache '
    projects_path = options.pokerdict['projects_path']
    cmd = ['cd ' + projects_path]
    cmd.append('export LANG=en_US.UTF-8')
    cmd.append('echo "================================"')
    cmd.append('svn up --non-interactive ' + svnau + ' ./*')
    cmd.append('echo "================================"')
    # cmd.append('svn info ./*')
    # cmd.append('echo "================================"')
    cmd = ';'.join(cmd)
    # actlog.log('cmd line->', cmd)
    # legacy hook: run the room-sync shell first when it exists on this host
    shname = '/home/tyhall/hall37/source/config_online/104/roomall.sh'
    if fsutils.fileExists(shname):
        commands.getstatusoutput(shname)
    status, output = commands.getstatusoutput(cmd)
    actlog.log('cmd return->', status)
    actlog.log('cmd output->')
    lines = output.split('\n')
    for l in lines:
        actlog.log(l)
    actlog.log('done')
    return 1
def action(options, params=None):
    """Run `svn up` over every project checkout under projects_path.

    When params contains both 'svnuser' and 'svnpwd', the credentials are
    passed to svn with --no-auth-cache.

    Returns 1 unconditionally; svn failures are only written to the log.
    """
    # fixed: mutable default argument `params={}` replaced with None sentinel
    params = {} if params is None else params
    svnuser = params.get('svnuser', None)
    svnpwd = params.get('svnpwd', None)
    svnau = ''
    if svnuser and svnpwd:
        svnau = ' --username ' + str(svnuser) + ' --password ' + str(
            svnpwd) + ' --no-auth-cache '
    projects_path = options.pokerdict['projects_path']
    cmd = ['cd ' + projects_path]
    cmd.append('export LANG=en_US.UTF-8')
    cmd.append('echo "================================"')
    cmd.append('svn up --non-interactive ' + svnau + ' ./*')
    cmd.append('echo "================================"')
    # cmd.append('svn info ./*')
    # cmd.append('echo "================================"')
    cmd = ';'.join(cmd)
    # actlog.log('cmd line->', cmd)
    # legacy hook: run the room-sync shell first when present on this host
    shname = '/home/tyhall/hall37/source/config_online/104/roomall.sh'
    if fsutils.fileExists(shname):
        commands.getstatusoutput(shname)
    status, output = commands.getstatusoutput(cmd)
    actlog.log('cmd return->', status)
    actlog.log('cmd output->')
    lines = output.split('\n')
    for l in lines:
        actlog.log(l)
    actlog.log('done')
    return 1
def do_http_get_process_list(self):
    """Answer the process-list HTTP request.

    Reads ._process_.json from the poker path (an empty list when the
    file is absent), sorts entries by type+id, and returns them in a
    MsgPack reply with cmd 'process_list'.
    """
    process_file = fsutils.appendPath(self.options.pokerpath, '._process_.json')
    if not fsutils.fileExists(process_file):
        process_datas = []
    else:
        process_datas = fsutils.readJsonFile(process_file)
    process_datas.sort(key=lambda p: p['type'] + p['id'])
    reply = MsgPack()
    reply.setCmd('process_list')
    reply.setResult('datas', process_datas)
    return reply
def do_http_t3flush_get_json_file(self, jsonfile):
    """Return the contents of *jsonfile* wrapped in a MsgPack reply.

    When the file does not exist, 'datas' carries an error dict instead
    of the parsed json.
    """
    self._initBase()
    ftlog.debug("do_http_t3flush_get_json_file jsonfile:", jsonfile)
    if not fsutils.fileExists(jsonfile):
        payload = {'error': 'file "%s" not existed' % jsonfile}
    else:
        payload = fsutils.readJsonFile(jsonfile)
    reply = MsgPack()
    reply.setCmd('json_file_data')
    reply.setResult('isOk', True)
    reply.setResult('jsonfile', jsonfile)
    reply.setResult('datas', payload)
    return reply
def do_http_texas_get_json_file(self, jsonfile):
    """Return the contents of *jsonfile* wrapped in a MsgPack reply.

    When the file does not exist, 'datas' carries an error dict instead
    of the parsed json.
    """
    self._initBase()
    # fixed copy-paste typo in the debug tag: was 'do_http_texas_gen_json_file'
    ftlog.debug("do_http_texas_get_json_file jsonfile:", jsonfile)
    if fsutils.fileExists(jsonfile):
        datas = fsutils.readJsonFile(jsonfile)
    else:
        datas = {'error': 'file "%s" not existed' % jsonfile}
    mo = MsgPack()
    mo.setCmd('json_file_data')
    mo.setResult('isOk', True)
    mo.setResult('jsonfile', jsonfile)
    mo.setResult('datas', datas)
    return mo
def _remake_static_zip(options, jdatas, alldata):
    """Rebuild the static resource zip and publish it.

    Writes *jdatas* as static.json into a zip with a fixed timestamp,
    renames the zip to <md5>.zip, records the md5 and download url in the
    'game:9999:upgrade_client_static:0' config entry of *alldata*, then
    runs the CDN upload shell if it exists.

    Returns 1 on success, 0 when the CDN upload script fails.
    """
    webroot = options.env['webroot_path']
    zfiletmp = webroot + '/static_file/static.zip'
    zipf = zipfile.ZipFile(zfiletmp, mode="w", compression=zipfile.ZIP_DEFLATED)
    # fixed date_time keeps the archive bytes (and thus its md5) stable
    # for identical json content
    zinfo = ZipInfo(filename='static.json', date_time=(2015, 12, 16, 0, 0, 0))
    jdatas = json.dumps(jdatas, sort_keys=True, indent=4, separators=(', ', ' : '))
    zipf.writestr(zinfo, jdatas, zipfile.ZIP_DEFLATED)
    zipf.close()
    # name the final file after the md5 of the zip bytes
    zdata = fsutils.readFile(zfiletmp)
    md5s = strutil.md5digest(zdata)
    zfile = webroot + '/static_file/' + md5s + '.zip'
    os.rename(zfiletmp, zfile)
    conf = alldata.get('game:9999:upgrade_client_static:0', None)
    if conf != None:
        conf['static_file_md5'] = md5s
        ulist = conf['static_file_url']
        # mode 1 hosts do not advertise their own download url
        if options.pokerdict.get('mode') != 1:
            http_game = options.pokerdict['http_game']
            myurl = http_game + '/static_file'
            if myurl not in ulist:
                # presumably mode > 1 means this host's url takes priority —
                # TODO confirm the mode semantics
                if options.pokerdict['mode'] > 1:
                    ulist.insert(0, myurl)
                else:
                    ulist.append(myurl)
            actlog.log('THE STATIC JSON CHANGED !! ', myurl + '/' + md5s + '.zip')
            actlog.log('THE STATIC JSON CHANGED !! ', zfile)
    # push_static flags whether an upload was attempted
    setattr(options, 'push_static', 0)
    if fsutils.fileExists('./cdn/copy_static_file.sh'):
        setattr(options, 'push_static', 1)
        actlog.log('UPLOAD ZIP TO CDN !!')
        st, out = commands.getstatusoutput('./cdn/copy_static_file.sh')
        if st != 0:
            actlog.log('UPLOAD ZIP TO CDN ERROR !!')
            actlog.log(out)
            return 0
        else:
            actlog.log('UPLOAD ZIP TO CDN OK !!', out)
    else:
        actlog.log('UPLOAD ZIP TO CDN THE SHELL NOT FOUND !')
    return 1
def _remake_static_zip(options, jdatas, alldata):
    """Rebuild the static resource zip and publish it (duplicate variant).

    Serializes *jdatas* into static.zip (fixed timestamp so identical
    content yields an identical md5), renames the archive to <md5>.zip,
    updates the 'game:9999:upgrade_client_static:0' entry in *alldata*,
    and runs the CDN upload shell when present.

    Returns 1 on success, 0 when the CDN upload script fails.
    """
    webroot = options.env['webroot_path']
    zfiletmp = webroot + '/static_file/static.zip'
    zipf = zipfile.ZipFile(zfiletmp, mode="w", compression=zipfile.ZIP_DEFLATED)
    zinfo = ZipInfo(filename='static.json', date_time=(2015, 12, 16, 0, 0, 0))
    jdatas = json.dumps(jdatas, sort_keys=True, indent=4, separators=(', ', ' : '))
    zipf.writestr(zinfo, jdatas, zipfile.ZIP_DEFLATED)
    zipf.close()
    # final file is named after the md5 of the archive bytes
    zdata = fsutils.readFile(zfiletmp)
    md5s = strutil.md5digest(zdata)
    zfile = webroot + '/static_file/' + md5s + '.zip'
    os.rename(zfiletmp, zfile)
    conf = alldata.get('game:9999:upgrade_client_static:0', None)
    if conf != None :
        conf['static_file_md5'] = md5s
        ulist = conf['static_file_url']
        # mode 1 hosts do not advertise their own download url
        if options.pokerdict.get('mode') != 1 :
            http_game = options.pokerdict['http_game']
            myurl = http_game + '/static_file'
            if myurl not in ulist :
                # presumably mode > 1 prioritizes this host's url — TODO confirm
                if options.pokerdict['mode'] > 1 :
                    ulist.insert(0, myurl)
                else:
                    ulist.append(myurl)
            actlog.log('THE STATIC JSON CHANGED !! ', myurl + '/' + md5s + '.zip')
            actlog.log('THE STATIC JSON CHANGED !! ', zfile)
    # push_static flags whether an upload was attempted
    setattr(options, 'push_static', 0)
    if fsutils.fileExists('./cdn/copy_static_file.sh') :
        setattr(options, 'push_static', 1)
        actlog.log('UPLOAD ZIP TO CDN !!')
        st, out = commands.getstatusoutput('./cdn/copy_static_file.sh')
        if st != 0 :
            actlog.log('UPLOAD ZIP TO CDN ERROR !!')
            actlog.log(out)
            return 0
        else:
            actlog.log('UPLOAD ZIP TO CDN OK !!', out)
    else:
        actlog.log('UPLOAD ZIP TO CDN THE SHELL NOT FOUND !')
    return 1
def __load_server_file(options, machinedict, gameids):
    """Build and validate the server process list for the current mode.

    Loads each game's room config (game/<id>/room/0.json), groups
    processes onto machines via auto_process, substitutes env variables,
    and verifies every resulting server ip belongs to a known machine.

    Returns the server list on success; a falsy value (None from
    actlog.error, or 0) on any validation failure.
    """
    mode = options.env['mode']
    processlist, machinedict = modefactory[mode].make_process_list(
        options, machinedict, gameids)
    if not processlist:
        return processlist
    allrooms = {}
    # Load the room configurations, used to obtain room process ids and counts
    for gameId in gameids:
        jname = 'game/' + str(gameId) + '/room/0.json'
        jsonfile = fsutils.appendPath(options.poker_path, jname)
        if fsutils.fileExists(jsonfile):
            actlog.log('load %-15s :' % (jname), jsonfile)
            rooms = fsutils.readJsonFile(jsonfile, True)
            if not isinstance(rooms, dict):
                return actlog.error(
                    jname + ' : format error, root object must be dict')
            for rid in rooms:
                # room ids must be globally unique across all games
                if rid in allrooms:
                    return actlog.error(jname + ' : the roomId already defined !! ' + str(rid))
                allrooms[rid] = rooms[rid]
    serverlist = auto_process.auto_group_process(machinedict, processlist, allrooms, mode)
    serverlist = strutil.replace_objevn_value(serverlist, options.env)
    # remove every server whose ip matches a known machine; anything left
    # over points at an undefined machine
    checks = strutil.cloneData(serverlist)
    for _, m in options.machinedict.items():
        internet = m['internet']
        intranet = m['intranet']
        # iterate backwards so deletion does not skip elements
        for x in xrange(len(checks) - 1, -1, -1):
            p = checks[x]
            if p['ip'] == internet or p['ip'] == intranet:
                del checks[x]
    if len(checks) > 0:
        for p in checks:
            actlog.error('can not find machine define of server ip :', p['ip'])
        return 0
    return serverlist
def do_http_get_json_file(self, jsonfile): jfile = fsutils.appendPath(self.options.pokerpath, jsonfile) print "DEBUG", "self.options.pokerpath:", self.options.pokerpath, "jsonfile:", jsonfile, 'jfile:', jfile if fsutils.fileExists(jfile): datas = fsutils.readJsonFile(jfile) else: if jsonfile in ('project.json', 'server.json'): datas = [] # elif jsonfile in model.models: # method = model.models[jsonfile].get('get') # if method: # datas = method() else: datas = {} mo = MsgPack() mo.setCmd('json_file_data') mo.setResult('jsonfile', jsonfile) mo.setResult('datas', datas) return mo
def get_action_log(options, action_uuid, line_num):
    """Return the lines of action.<action_uuid>.log after skipping line_num lines.

    Args:
        options: carries .logpath, the log directory.
        action_uuid: id embedded in the log file name.
        line_num: number of leading lines to skip (<= 0 skips none).

    Returns:
        list of stripped log lines, or a two-element message list when
        the file is missing.
    """
    logpath = options.logpath
    fpath = fsutils.appendPath(logpath, 'action.' + action_uuid + '.log')
    lines = []
    if fsutils.fileExists(fpath):
        # 'with' guarantees the handle is closed; the previous
        # f = None / finally-close pattern called close() on None when
        # open() failed, masking the real error
        with open(fpath) as f:
            if line_num > 0:
                for _ in xrange(line_num):
                    f.readline()
            l = f.readline()
            while l:
                lines.append(l.strip())
                l = f.readline()
    else:
        lines.append('the log file is missing !!')
        lines.append(fpath)
    return lines
def get_action_log(options, action_uuid, line_num):
    """Collect the lines of action.<action_uuid>.log, skipping the first
    line_num lines; report a message list when the file is absent."""
    log_file = fsutils.appendPath(options.logpath,
                                  'action.' + action_uuid + '.log')
    result = []
    if not fsutils.fileExists(log_file):
        result.append('the log file is missing !!')
        result.append(log_file)
        return result
    fin = None
    try:
        fin = open(log_file)
        # drop the already-seen prefix of the log
        skipped = 0
        while skipped < line_num:
            fin.readline()
            skipped += 1
        # readline() returns '' at EOF, which terminates the iterator
        for raw in iter(fin.readline, ''):
            result.append(raw.strip())
    finally:
        try:
            fin.close()
        except:
            pass
    return result
def action(options):
    """Precompile all .py files to .pyc to surface syntax errors early,
    after building the C/C++ (cffi) shared objects.

    Returns 1 on success, 0 when any compile step fails.
    """
    if os.environ.get('RUN_IN_DOCKER', 0):
        # In dev docker mode webroot is a link, no copy/precompile needed
        if not makeSoInDocker(options):
            return 0
        actlog.log('docker mode skip compiler pyc !')
        return 1
    # Build the SO files
    bin_path = options.env['bin_path']
    # LINUX WIN32 — pick the per-platform makeso script set
    if platform.system() == 'Darwin':
        makesosh = [
            os.path.join('freetime', 'core', 'cffi', 'makeso.sh'),
            os.path.join('poker', 'util', 'cffi', 'makeso.sh'),
        ]
    elif platform.system() == 'Windows':
        _, makesosh = fsutils.findTreeFiles(
            bin_path, ['.*' + os.path.sep + '(makeso.cmd)$'], ['.*\\.svn.*'])
    else:
        _, makesosh = fsutils.findTreeFiles(
            bin_path, ['.*' + os.path.sep + '(makeso.sh)$'], ['.*\\.svn.*'])
    if len(makesosh) == 0:
        actlog.log('run C/C++ compiler : not found !!')
    for msh in makesosh:
        cmd = bin_path + os.path.sep + msh
        # scripts from version control may not be executable
        os.chmod(cmd, stat.S_IRWXU | stat.S_IRWXG)
        actlog.log('run C/C++ compiler :', msh)
        status, output = commands.getstatusoutput(cmd)
        if status != 0:
            actlog.log('C/C++ compiler ERROR !!', cmd)
            actlog.log(output)
            return 0
    # Precompilation is only performed on a Linux runtime environment
    if platform.system() != 'Linux':
        return 1
    # Generate the compile driver file: one import per package so the
    # interpreter parses (and byte-compiles) every module
    pkgs = []
    pyfiles = options._pyfiles
    for pyf in pyfiles:
        if pyf.endswith('.py') and pyf.find('hotfix') < 0:
            pkg = ' import ' + '.'.join(pyf.split(os.path.sep)[1:])
            pkg = pkg[0:-3]  # strip trailing '.py'
            if pkg.endswith('__init__'):
                pkg = pkg[0:-9]  # import the package itself
            pkgs.append(pkg)
    # NOTE(review): layout of this generated source was reconstructed from
    # a collapsed original — confirm against a known-good _compiler.py
    content = '''
# -*- coding: utf-8 -*-
# author time : %s
import sys
from twisted.internet import reactor # 确保reactor第一时间初始化, 否则可能莫名其妙的crash
if sys.getdefaultencoding().lower() != 'utf-8' :
    reload(sys)
    sys.setdefaultencoding("utf-8")
try:
%s
except:
    print sys.path
    raise
''' % (str(datetime.now()), '\n'.join(pkgs))
    cfilepath = bin_path + os.path.sep + '_compiler.py'
    fsutils.writeFile('', cfilepath, content)
    actlog.log('run PYPY compiler :', cfilepath)
    pypy = strutil.getEnv('PYPY', 'pypy')
    cmd = '%s -tt %s' % (pypy, cfilepath)
    status, output = commands.getstatusoutput(cmd)
    if status != 0:
        actlog.log('ERROR !!', 'compile py files false !', status,
                   cfilepath)
        lines = output.split('\n')
        for line in lines:
            actlog.log(line)
        return 0
    else:
        # fsutils.deleteFile(cfilepath)
        # remove the generated .pyc files; only the syntax check matters
        for pyf in pyfiles:
            if pyf.endswith('.py'):
                f = bin_path + os.path.sep + pyf + 'c'
                if fsutils.fileExists(f):
                    fsutils.deleteFile(f)
    return 1
def action(options):
    """Precompile all .py files to .pyc to surface syntax errors early
    (duplicate variant), after building the C/C++ (cffi) shared objects.

    Returns 1 on success, 0 when any compile step fails.
    """
    if os.environ.get('RUN_IN_DOCKER', 0):
        # In dev docker mode webroot is a link, no copy/precompile needed
        if not makeSoInDocker(options):
            return 0
        actlog.log('docker mode skip compiler pyc !')
        return 1
    # Build the SO files
    bin_path = options.env['bin_path']
    # LINUX WIN32 — pick the per-platform makeso script set
    if platform.system() == 'Darwin':
        makesosh = [
            os.path.join('freetime', 'core', 'cffi', 'makeso.sh'),
            os.path.join('poker', 'util', 'cffi', 'makeso.sh'),
        ]
    elif platform.system() == 'Windows':
        _, makesosh = fsutils.findTreeFiles(bin_path,
                                            ['.*' + os.path.sep + '(makeso.cmd)$'],
                                            ['.*\\.svn.*'])
    else:
        _, makesosh = fsutils.findTreeFiles(bin_path,
                                            ['.*' + os.path.sep + '(makeso.sh)$'],
                                            ['.*\\.svn.*'])
    if len(makesosh) == 0:
        actlog.log('run C/C++ compiler : not found !!')
    for msh in makesosh:
        cmd = bin_path + os.path.sep + msh
        # scripts from version control may not be executable
        os.chmod(cmd, stat.S_IRWXU | stat.S_IRWXG)
        actlog.log('run C/C++ compiler :', msh)
        status, output = commands.getstatusoutput(cmd)
        if status != 0:
            actlog.log('C/C++ compiler ERROR !!', cmd)
            actlog.log(output)
            return 0
    # Precompilation is only performed on a Linux runtime environment
    if platform.system() != 'Linux':
        return 1
    # Generate the compile driver file: one import per package so the
    # interpreter parses (and byte-compiles) every module
    pkgs = []
    pyfiles = options._pyfiles
    for pyf in pyfiles:
        if pyf.endswith('.py') and pyf.find('hotfix') < 0:
            pkg = ' import ' + '.'.join(pyf.split(os.path.sep)[1:])
            pkg = pkg[0:-3]  # strip trailing '.py'
            if pkg.endswith('__init__'):
                pkg = pkg[0:-9]  # import the package itself
            pkgs.append(pkg)
    # NOTE(review): layout of this generated source was reconstructed from
    # a collapsed original — confirm against a known-good _compiler.py
    content = '''
# -*- coding: utf-8 -*-
# author time : %s
import sys
from twisted.internet import reactor # 确保reactor第一时间初始化, 否则可能莫名其妙的crash
if sys.getdefaultencoding().lower() != 'utf-8' :
    reload(sys)
    sys.setdefaultencoding("utf-8")
try:
%s
except:
    print sys.path
    raise
''' % (str(datetime.now()), '\n'.join(pkgs))
    cfilepath = bin_path + os.path.sep + '_compiler.py'
    fsutils.writeFile('', cfilepath, content)
    actlog.log('run PYPY compiler :', cfilepath)
    pypy = strutil.getEnv('PYPY', 'pypy')
    cmd = '%s -tt %s' % (pypy, cfilepath)
    status, output = commands.getstatusoutput(cmd)
    if status != 0:
        actlog.log('ERROR !!', 'compile py files false !', status,
                   cfilepath)
        lines = output.split('\n')
        for line in lines:
            actlog.log(line)
        return 0
    else:
        # fsutils.deleteFile(cfilepath)
        # remove the generated .pyc files; only the syntax check matters
        for pyf in pyfiles:
            if pyf.endswith('.py'):
                f = bin_path + os.path.sep + pyf + 'c'
                if fsutils.fileExists(f):
                    fsutils.deleteFile(f)
    return 1
def copySourceInDocker(options):
    """Copy each project's src/run.py into bin_path.

    Later projects overwrite earlier ones, so the last project with a
    run.py wins.
    """
    target = options.env['bin_path'] + '/run.py'
    for project in options.projectlist:
        source_run = fsutils.appendPath(project['path'], 'src/run.py')
        if fsutils.fileExists(source_run):
            fsutils.copyFile(source_run, target)
def action(options):
    '''Load and validate all service startup configuration files.

    Loads project / machine / global / db / server / cmd configs, attaches
    them to *options*, then writes the merged data out as json snapshots
    (._service_.json, ._process_.json, out.*.json) plus a sorted
    out.process.list. Returns 1 on success, 0 on any load failure.
    Raises when out.server.json differs from its .base snapshot.
    '''
    projectlist, game_packages, gameids = __load_project_file(options)
    if not projectlist:
        return 0
    setattr(options, 'projectlist', projectlist)
    options.pokerdict['game_packages'] = game_packages
    # collect game/<id> config dirs for known gameIds
    otherconf = os.listdir(fsutils.appendPath(options.poker_path, 'game'))
    conf_projects = []
    for oc in otherconf:
        try:
            gid = int(oc)  # non-numeric dir names are skipped
            if gid in gameids:
                conf_projects.append('game/' + str(gid))
        except:
            pass
    # game/9998 is always included — presumably a built-in/system game,
    # TODO confirm
    if 'game/9998' not in conf_projects:
        conf_projects.append('game/9998')
    options.pokerdict['conf.projects'] = conf_projects
    # actlog.log('projectlist=', projectlist)
    machinedict = __load_machine_file(options)
    if not machinedict:
        return 0
    setattr(options, 'machinedict', machinedict)
    # expose each machine's addresses as env variables for substitution
    myenv = options.env
    for mid, mdefs in machinedict.items():
        myenv[mid + '.internet'] = mdefs['internet']
        myenv[mid + '.intranet'] = mdefs['intranet']
    # actlog.log('machinedict=', machinedict)
    globaldict = __load_global_file(options)
    if not globaldict:
        return 0
    setattr(options, 'globaldict', globaldict)
    # actlog.log('globaldict=', globaldict)
    options.pokerdict['log_path'] = globaldict['log_path']
    myenv['log_path'] = globaldict['log_path']
    dbdict = __load_db_file(options)
    if not dbdict:
        return 0
    setattr(options, 'dbdict', dbdict)
    # actlog.log('dbdict=', dbdict)
    myenv['db.mysql.all'] = dbdict['mysql'].keys()
    myenv['db.redis.all'] = dbdict['redis'].keys()
    serverlist = __load_server_file(options, machinedict, gameids)
    if not serverlist:
        return 0
    setattr(options, 'serverlist', serverlist)
    # actlog.log('serverlist=', serverlist)
    cmddict = __load_cmd_file(options)
    if not cmddict:
        return 0
    setattr(options, 'cmddict', cmddict)
    # actlog.log('cmddict=', cmddict)
    # rooms = __load_rooms_file(options)
    # if not rooms :
    #     return 0
    # setattr(options, 'rooms', rooms)
    # actlog.log('cmddict=', cmddict)
    # snapshot everything that was loaded
    datas = {}
    datas['env'] = options.env
    datas['pokerdict'] = options.pokerdict
    datas['projectlist'] = options.projectlist
    datas['machinedict'] = options.machinedict
    datas['globaldict'] = options.globaldict
    datas['dbdict'] = options.dbdict
    datas['serverlist'] = options.serverlist
    datas['cmddict'] = options.cmddict
    fsutils.writeFile(options.poker_path, '._service_.json', datas)
    fsutils.writeFile(options.poker_path, '._process_.json', datas['serverlist'])
    fsutils.writeFile(options.poker_path, '._confprojects_.json', conf_projects)
    outpath = options.pokerdict['output_path']
    fsutils.writeFile(outpath, 'out.datas.json', datas)
    fsutils.writeFile(outpath, 'out.db.json', datas['dbdict'])
    fsutils.writeFile(outpath, 'out.poker.global.json', datas['pokerdict'])
    fsutils.writeFile(outpath, 'out.freetime.global.json', datas['globaldict'])
    fsutils.writeFile(outpath, 'out.server.json', datas['serverlist'])
    # guard: if a baseline snapshot exists the server layout must not change
    if fsutils.fileExists(outpath + '/out.server.json.base'):
        s1 = fsutils.readFile(outpath + '/out.server.json.base')
        s2 = fsutils.readFile(outpath + '/out.server.json')
        if s1.strip() != s2.strip():
            raise Exception('the server.ison changed !! can not go !!')
    # human-readable, sorted process listing
    slist = []
    for k in datas['serverlist']:
        # NOTE(review): str() wraps the whole ip+agent concatenation —
        # probably intended str(k['ip']) only; behavior preserved as-is
        slist.append(
            str(k['type']) + str(k['id']) + ' ' +
            str(k['ip'] + ' ' + str(k.get('agent', ''))))
    slist.sort()
    slist = '\n'.join(slist)
    fsutils.writeFile(outpath, 'out.process.list', slist)
    return 1