def new(self, req, body=None):
    """Create a new paypal payment order.

    Validates the request body, rejects stale or future order ids,
    asks paypal for a payment serial and records the order.
    """
    body = body or {}
    if not isinstance(body, dict):
        raise InvalidArgument(
            'Http body not json or content type is not application/json')
    jsonutils.schema_validate(body, NEWPAYMENT)
    oid = int(body.get('oid'))
    # Reject order ids older than 10 minutes (600000 ms) or from the future.
    current_ms = int(time.time() * 1000)
    order_ms = uuidutils.Gprimarykey.timeformat(oid)
    if (current_ms - order_ms) > 600000 or order_ms > current_ms:
        LOG.debug('Oder time %d, now %d' % (order_ms, current_ms))
        raise InvalidArgument('Order id error')
    money = body.get('money')
    serial = paypalApi.payment(money, body.get('url'))
    session = endpoint_session()
    coins = self.order(session, paypalApi, serial,
                       body.get('uid'), oid, money,
                       body.get('cid'), body.get('chapter'))
    payment = dict(paypal=dict(paymentID=serial),
                   oid=oid, coins=coins, money=money)
    return resultutils.results(result='create paypal payment success',
                               data=[payment])
def drop_privileges(engine, auths, raise_error=False):
    """Revoke schema privileges and drop the listed users.

    Builds one REVOKE and one DROP USER statement per auth entry plus a
    trailing FLUSH PRIVILEGES, then executes them on a schema-less
    connection.  DatabaseError on any statement is logged and, unless
    raise_error is set, swallowed so the remaining statements still run.
    """
    jsonutils.schema_validate(auths, AUTHSCHEMA)
    schema = engine.url.database
    no_schema_engine = get_no_schema_engine(engine)
    statements = []
    for auth in auths:
        info = {'schema': schema,
                'user': auth.get('user'),
                'source': auth.get('source') or '%',
                'privileges': auth.get('privileges') or 'ALL'}
        statements.append(
            "REVOKE %(privileges)s ON %(schema)s.* FROM '%(user)s'@'%(source)s'" % info)
        info.pop('privileges')
        info.pop('schema')
        statements.append("drop user '%(user)s'@'%(source)s'" % info)
    statements.append('FLUSH PRIVILEGES')
    with no_schema_engine.connect() as conn:
        for statement in statements:
            try:
                conn.execute(statement).close()
            except DatabaseError as e:
                LOG.warning('Drop privileges sql [%s] catch programing error' % statement)
                if LOG.isEnabledFor(logging.DEBUG):
                    LOG.exception('Message %s, errno %d' % (e.msg, e.errno))
                if raise_error:
                    raise e
def hotfix(self, req, group_id, objtype, entity, body=None):
    """Push a hotfix to gameserver entities via an async bulk rpc.

    Only valid for GAMESERVER.  A context manager is handed to
    _async_bluck_rpc which injects a 'post_run' hook: after the rpc,
    an HTTP GET against the gm hot-update url is executed if at least
    one entity reported RESULT_SUCCESS.
    """
    group_id = int(group_id)
    body = body or {}
    if objtype != common.GAMESERVER:
        raise InvalidArgument('Hotfix just for %s' % common.GAMESERVER)
    jsonutils.schema_validate(body, self.HOTFIX)
    body.setdefault('objtype', objtype)
    # gm interface url that triggers the in-game hot update
    url = gmurl(req, group_id, interface='hotupdateconfig?RealSvrIds=0')

    @contextlib.contextmanager
    def context(reqeust_id, entitys, agents):
        # post_run: fire the GET synchronously ('async': False) when the
        # success-count condition ('counter': '>', 'count': 0) is met.
        post_run = {'executer': 'http',
                    'ekwargs': {'url': url, 'method': 'GET', 'async': False},
                    'condition': 'entitys',
                    'ckwargs': {'all': False, 'operator': '=',
                                'value': manager_common.RESULT_SUCCESS,
                                'counter': '>', 'count': 0}}
        # mutate the shared body before the rpc is sent
        body.update({'post_run': post_run})
        yield
    return self._async_bluck_rpc('hotfix', group_id, objtype, entity, body, context)
def validate(cls, data):
    """Validate input data matches expected failure ``dict`` format.

    After schema validation, walk the failure and all of its nested
    causes breadth-first and verify every 'exc_type_names' chain is
    rooted at one of ``cls.BASE_EXCEPTIONS`` — the exception types
    python itself guarantees to exist.
    """
    try:
        jsonutils.schema_validate(data, cls.SCHEMA)
    except jsonutils.ValidationError as e:
        raise exc.InvalidFormat("Failure data not of the"
                                " expected format: %s" % (e.message), e)
    # Breadth-first traversal of the cause tree.
    pending = collections.deque([data])
    while pending:
        node = pending.popleft()
        root_type = node['exc_type_names'][-1]
        if root_type not in cls.BASE_EXCEPTIONS:
            raise exc.InvalidFormat(
                "Failure data 'exc_type_names' must"
                " have an initial exception type that is one"
                " of %s types: '%s' is not one of those"
                " types" % (cls.BASE_EXCEPTIONS, root_type))
        nested = node.get('causes')
        if nested:
            pending.extend(nested)
def create(self, req, body=None):
    """Create a scheduler job through rpc.

    The job start time (unix timestamp in body['start']) must be at
    least 5 minutes in the future.

    Raises InvalidArgument when the start time is too close, and
    RpcResultError when the scheduler rpc returns nothing.
    """
    body = body or {}
    jsonutils.schema_validate(body, SCHEDULEJOBSCHEMA)
    # BUG FIX: the original converted body['start'] into a datetime and
    # compared it against an int timestamp, which is never a valid
    # comparison; compare raw unix timestamps instead.
    if body['start'] < int(time.time()) + 300:
        raise InvalidArgument('Do not add a scheduler in 5 min')
    job_id = uuidutils.Gkey()
    rpc = get_client()
    glock = get_global().lock('autorelase')
    with glock(targetutils.schedule_job(), 30):
        job_result = rpc.call(targetutils.target_anyone(manager_common.SCHEDULER),
                              ctxt={'finishtime': ""},
                              msg={'method': 'scheduler',
                                   'args': {'job_id': job_id,
                                            'jobdata': body}})
    # job interval
    if not job_result:
        # BUG FIX: error message was copy-pasted from a delete-agent api
        raise RpcResultError('create scheduler job result is None')
    if job_result.get('resultcode') != manager_common.RESULT_SUCCESS:
        return resultutils.results(result=job_result.get('result'))
    return resultutils.results(
        result='Create scheduler job:%d success' % job_id)
def migrate(self, req, group_id, objtype, entity, body=None):
    """Move an entity to a new agent (databases/chiefs kept, no data copy).

    Checks the app file exists on the target, rebinds the entity's
    agent in the database, then notifies the new agent and resets the
    entity there.
    """
    body = body or {}
    entity = int(entity)
    group_id = int(group_id)
    jsonutils.schema_validate(body, self.MIGRATE)
    new = body.pop('new')
    new = int(new)
    body.update({'databases': True, 'chiefs': True})
    session = endpoint_session(autocommit=False)
    query = model_query(session, AppEntity, filter=AppEntity.entity == entity)
    query = query.options(joinedload(AppEntity.areas, innerjoin=False))
    _entity = query.one()
    if _entity.objtype != objtype:
        raise InvalidArgument('Entity is not %s' % objtype)
    if not self._check_file(_entity.agent_id, objtype, body.get(common.APPFILE)):
        # BUG FIX: the original left the %d placeholder unformatted,
        # so the result message contained a literal '%d'.
        return resultutils.results(
            result='migrate entity %d not run, check appfile fail' % entity)
    LOG.debug('Check appfile success, migrate start')
    areas = [dict(area_id=area.area_id, areaname=area.areaname,
                  show_id=area.show_id)
             for area in _entity.areas]
    # Rebind the entity inside the migrate context so ports are dropped
    # atomically with the agent change.
    with entity_controller.migrate_with_out_data(
            common.NAME, entity, new,
            dict(token=uuidutils.generate_uuid()), drop_ports=True):
        _entity.agent_id = new
        session.commit()
    LOG.info('Migrate finish, now call post create entity and reset entity on new agent')
    entity_controller.post_create_entity(entity, common.NAME, objtype=objtype,
                                         status=_entity.status,
                                         opentime=_entity.opentime,
                                         group_id=group_id, areas=areas,
                                         migrate=True)
    LOG.info('Notify create entity in new agent success')
    return self.reset(req, group_id, objtype, entity, body)
def create(self, req, body):
    """Register an uploaded file in the download-file table."""
    jsonutils.schema_validate(body, FileReuest.SCHEMA)
    file_md5 = body.pop('md5')
    file_addr = body.pop('address')
    file_size = body.pop('size')
    # Default the extension from the address suffix; strip one leading dot.
    suffix = body.get('ext') or file_addr.split('.')[-1]
    if suffix.startswith('.'):
        suffix = suffix[1:]
    file_status = body.get('status', manager_common.DOWNFILE_FILEOK)
    session = get_session()
    downfile = DownFile(md5=file_md5,
                        downloader=body.get('downloader', 'http'),
                        adapter_args=body.get('adapter_args'),
                        address=file_addr,
                        ext=suffix,
                        size=file_size,
                        status=file_status,
                        desc=body.get('desc'),
                        uploadtime=body.get('uploadtime', timeutils.utcnow()))
    session.add(downfile)
    session.flush()
    summary = dict(md5=downfile.md5, size=downfile.size,
                   uploadtime=downfile.uploadtime,
                   downloader=downfile.downloader)
    return resultutils.results(result='Add file success', data=[summary])
def drop_schema(engine, auths=None):
    """Drop the engine's database schema and, optionally, its users."""
    if auths:
        jsonutils.schema_validate(auths, AUTHSCHEMA)
    # Only issue DROP DATABASE when the schema actually exists.
    if get_schema_info(engine):
        engine.execute("DROP DATABASE %s" % engine.url.database)
    if auths:
        # Best-effort user cleanup; errors are logged, not raised.
        drop_privileges(engine, auths, raise_error=False)
def cover(self, req, cid, body=None):
    """Upload a comic cover through a websocket receiver process.

    Reserves a websocket port, launches a receiver under the comic's
    directory, and returns the upload uri; the actual image conversion
    runs in the receiver's exit callback once the upload finishes.
    """
    cid = int(cid)
    jsonutils.schema_validate(body, COVERUPLOAD)
    timeout = body.get('timeout', 20)
    fileinfo = body.get('fileinfo')
    comic_path = self.comic_path(cid)
    logfile = '%d.conver.%d.log' % (int(time.time()), cid)
    logfile = os.path.join(self.logdir, logfile)
    # Temporary upload target inside the comic directory; the receiver
    # writes to it via the 'overwrite' hint.
    tmpfile = 'main.%d.pic' % int(time.time())
    fileinfo.update({'overwrite': tmpfile})
    tmpfile = os.path.join(comic_path, tmpfile)
    if os.path.exists(tmpfile):
        raise exceptions.ComicUploadError('Upload cover file fail')
    session = endpoint_session(readonly=True)
    query = model_query(session, Comic, filter=Comic.cid == cid)
    comic = query.one()
    rename = 'main.%s' % comic.ext
    # Reserve a websocket port; must be returned to WSPORTS on every path.
    port = max(WSPORTS)
    WSPORTS.remove(port)

    def _exitfunc():
        # Runs when the receiver exits: release the port, then convert
        # the uploaded file into the final cover image.
        WSPORTS.add(port)
        if not os.path.exists(tmpfile):
            LOG.error('comic cover file %s not exist' % tmpfile)
        else:
            LOG.info('Call shell command convert')
            convert.convert_cover(tmpfile, rename=rename, logfile=logfile)
            LOG.info('Convert execute success')

    ws = LaunchRecverWebsocket(WEBSOCKETPROC)
    try:
        uri = ws.upload(user=CF.user, group=CF.group,
                        ipaddr=CF.ipaddr, port=port,
                        rootpath=comic_path, fileinfo=fileinfo,
                        logfile=logfile, timeout=timeout)
    except Exception:
        # Launch failed: give the port back immediately.
        WSPORTS.add(port)
        return resultutils.results(result='upload cover get websocket uri fail',
                                   resultcode=manager_common.RESULT_ERROR)
    else:
        ws.asyncwait(exitfunc=_exitfunc)
        return resultutils.results(result='upload cover get websocket uri success',
                                   data=[uri])
def ready(self, req, database_id, body=None):
    """Mark a slave relation of the given database as ready."""
    body = body or {}
    jsonutils.schema_validate(body, self.SLAVEREADY)
    database_id = int(database_id)
    params = dict(req=req)
    params.update(body)
    manager = _impl(database_id)
    ready_result = manager.ready_relation(database_id, **params)
    return resultutils.results(result='Set relation to ready success',
                               data=[ready_result])
def _kwarg_check(self, kwargs):
    """Validate and normalize kwargs for the http executer.

    Pops every recognized key out of *kwargs* (mutating the caller's
    dict) and returns a fully defaulted dict of request options.
    NOTE(review): 'async' is used as an identifier, so this code is
    Python 2 only — 'async' became a keyword in Python 3.7.
    """
    jsonutils.schema_validate(kwargs, self.HTTPKWARGS)
    url = kwargs.pop('url')
    method = kwargs.pop('method', 'GET')
    headers = kwargs.pop('headers', None)
    params = kwargs.pop('params', None)
    json = kwargs.pop('json', None)
    data = kwargs.pop('data', None)
    timeout = kwargs.pop('timeout', 5)
    async = kwargs.pop('async', True)
    return dict(url=url, method=method, headers=headers, params=params,
                data=data, json=json, timeout=timeout, async=async)
def slave(self, req, database_id, body=None):
    """Bond a slave database to this master database."""
    body = body or {}
    jsonutils.schema_validate(body, self.BONDSLAVE)
    database_id = int(database_id)
    params = dict(req=req)
    params.update(body)
    manager = _impl(database_id)
    bond_result = manager.slave_database(database_id, **params)
    return resultutils.results(result='master bond slave database success',
                               data=[bond_result])
def unbond(self, req, database_id, body=None):
    """Unbond this slave database from its master."""
    body = body or {}
    jsonutils.schema_validate(body, self.UNBONDMASTER)
    database_id = int(database_id)
    params = dict(req=req)
    params.update(body)
    manager = _impl(database_id)
    unbond_result = manager.unbond_database(database_id, **params)
    return resultutils.results(result='unbond slave database success',
                               data=[unbond_result])
def _compile(self, position, rctxt):
    """Build an executer (with optional condition) from a run-ctxt dict.

    Mutates *rctxt* by popping the recognized keys; looks the executer
    and condition classes up in the instance registries.
    """
    LOG.debug('try compile %s ctxt function' % position)
    jsonutils.schema_validate(rctxt, self.AYNCRUNCTXT)
    executer = rctxt.pop('executer')
    ekwargs = rctxt.pop('ekwargs', None)
    condition = rctxt.pop('condition', None)
    ckwargs = rctxt.pop('ckwargs', None)
    executer_cls = self.executers[executer]
    if condition:
        checker = self.conditions[condition](position, ckwargs)
    else:
        checker = None
    return executer_cls(ekwargs, checker)
def notify(self, req, oid, body=None):
    """Handle the paypal payment-approved notification for an order.

    Verifies order freshness, ownership and serial, then executes the
    payment inside the record() transaction callback.
    """
    body = body or {}
    if not isinstance(body, dict):
        raise InvalidArgument(
            'Http body not json or content type is not application/json')
    oid = int(oid)
    # Order ids embed a timestamp; refuse anything older than 10 minutes
    # (600000 ms) or from the future.
    now = int(time.time() * 1000)
    otime = uuidutils.Gprimarykey.timeformat(oid)
    if (now - otime) > 600000 or otime > now:
        raise InvalidArgument('Order id error or more the 600s')
    jsonutils.schema_validate(body, ESUREPAY)
    paypal = body.get('paypal')
    uid = body.get('uid')
    session = endpoint_session()
    query = model_query(session, Order, filter=Order.oid == oid)
    order = query.one()
    if order.uid != uid:
        raise InvalidArgument('User id not the same')
    if order.serial != paypal.get('paymentID'):
        raise InvalidArgument('paymentID not the same')

    def paypal_execute(extdata=None):
        # Executed inside the record() transaction so payment execution
        # and persistence succeed or fail together.
        LOG.info('Call paypalApi execute order')
        paypalApi.execute(paypal, order.money)
        return extdata
    try:
        self.record(session, order, None, None,
                    on_transaction_call=paypal_execute)
    except DBError:
        # NOTE(review): DB failure is logged but NOT re-raised — payment
        # already executed; presumably a deliberate best-effort. Confirm.
        LOG.error('Paypal save order %d to database fail' % order.oid)
    except exceptions.EsureOrderError:
        LOG.error('Call Paypal execute order fail')
        raise
    return resultutils.results(result='notify orde success',
                               data=[dict(paypal=dict(
                                   paymentID=paypal.get('paymentID'),
                                   payerID=paypal.get('payerID')),
                                   oid=oid,
                                   coins=order.gift + order.coin,
                                   money=order.money)])
def create_schema(engine, auths=None, character_set=None, collation_type=None, **kwargs):
    """Create the engine's database schema, granting auths if given.

    Raises exceptions.DBExist when the schema already exists.
    The character set defaults to utf8.
    """
    if auths:
        jsonutils.schema_validate(auths, AUTHSCHEMA)
    schema = engine.url.database
    no_schema_engine = get_no_schema_engine(engine, **kwargs)
    if get_schema_info(engine):
        raise exceptions.DBExist(schema)
    charset = character_set or 'utf8'
    sql = "CREATE DATABASE %s DEFAULT CHARACTER SET %s" % (schema, charset)
    if collation_type:
        sql += ' COLLATE %s' % collation_type
    # Grant privileges around schema creation so the users exist for it.
    with privileges(engine, auths):
        no_schema_engine.execute(sql)
def overtime(self, req, request_id, body):
    """Record an overtime respone for agents that did not answer.

    Reserved interface: the same functionality is already implemented
    inside the rpc server (agent not response, async checker sends an
    overtime respone).
    """
    jsonutils.schema_validate(body, OVERTIMESCHEMA)
    agent_time = body.get('agent_time')
    agents = set(body.get('agents'))
    session = get_session()
    query = model_query(session, AsyncRequest).filter_by(request_id=request_id)
    asynecrequest = query.one()
    if asynecrequest.status == manager_common.FINISH:
        raise InvalidArgument('Async request has been finished')

    def bluk():
        # Insert one overtime result row per non-responding agent,
        # into the cache when the request expires, else the session.
        bulk_data = []
        for agent_id in agents:
            data = dict(request_id=request_id, agent_id=agent_id,
                        agent_time=agent_time,
                        server_time=int(time.time()),
                        resultcode=manager_common.RESULT_OVER_FINISHTIME,
                        result='Agent respone overtime')
            bulk_data.append(data)
        responeutils.bluk_insert(
            storage=get_cache() if asynecrequest.expire else session,
            agents=agents, bulk_data=bulk_data,
            expire=asynecrequest.expire)

    if agents:
        query.update({'status': manager_common.FINISH,
                      'resultcode': manager_common.RESULT_NOT_ALL_SUCCESS,
                      'result': '%d agent not respone' % len(agents)})
    else:
        # BUG FIX: the original applied '%' to a format string with no
        # placeholder ('all agent respone result' % len(agents)), which
        # raises TypeError at runtime.
        query.update({'status': manager_common.FINISH,
                      'resultcode': manager_common.RESULT_SUCCESS,
                      'result': 'all agent respone result'})
    session.flush()
    session.close()
    threadpool.add_thread(bluk)
    return resultutils.results(result='Post agent overtime success')
def create_privileges(engine, auths):
    """Grant privileges on the engine's schema for each auth entry.

    GRANT ... IDENTIFIED BY creates the user when it does not exist;
    a trailing FLUSH PRIVILEGES makes the grants effective.
    """
    jsonutils.schema_validate(auths, AUTHSCHEMA)
    schema = engine.url.database
    no_schema_engine = get_no_schema_engine(engine)
    statements = []
    for auth in auths:
        info = {'schema': schema,
                'user': auth.get('user'),
                'passwd': auth.get('passwd'),
                'source': auth.get('source') or '%',
                'privileges': auth.get('privileges') or 'ALL'}
        statements.append(
            "GRANT %(privileges)s ON %(schema)s.* TO '%(user)s'@'%(source)s'"
            " IDENTIFIED by '%(passwd)s'" % info)
    statements.append('FLUSH PRIVILEGES')
    with no_schema_engine.connect() as conn:
        for statement in statements:
            result = conn.execute(statement)
            result.close()
def create(self, req, database_id, body=None):
    """Create an empty schema in the database identified by database_id."""
    body = body or {}
    database_id = int(database_id)
    jsonutils.schema_validate(body, self.CREATESCHEMA)
    auth = body.pop('auth', None)
    options = body.pop('options', None)
    schema = body.pop('schema', None)
    self._validate_schema(schema)
    params = dict(req=req)
    params.update(body)
    manager = _impl(database_id)
    created = manager.create_schema(database_id, schema, auth, options, **params)
    return resultutils.results(result='create empty schema success',
                               data=[created])
def notify_prepare(notify):
    """Pick and build the notify implementation matching *notify*'s keys.

    Each value in *notify* is schema-validated; the marker key found in
    the values ('target', 'action' or 'url') selects exactly one
    implementation class.  Ambiguous or unmatched input raises.
    """
    if not notify:
        return EmptyNotify(notify)
    # marker key in the value dict -> implementation class
    markers = (('target', GopRpcNotify),
               ('action', GopHttpNotify),
               ('url', HttpNotify))
    matched = set()
    for data in six.itervalues(notify):
        jsonutils.schema_validate(data, NOTIFYSCHEMA)
        for key, impl in markers:
            if key in data:
                matched.add(impl)
    if len(matched) != 1:
        matched.clear()
        raise ValueError('Notify type error, more then one')
    cls = matched.pop()
    # every top-level key must map to an attribute on the chosen class
    for attrib in notify:
        if not hasattr(cls, attrib):
            raise AttributeError('Notify has no %s' % attrib)
    LOG.info('Prepare notify %s' % cls.__name__)
    return cls(notify)
def new(self, req, body=None):
    """Create a new weixin payment order.

    Asks weixin for a prepay id, records the order, and returns the
    data the client needs to invoke the weixin pay sdk.
    """
    body = body or {}
    if not isinstance(body, dict):
        raise InvalidArgument(
            'Http body not json or content type is not application/json')
    jsonutils.schema_validate(body, NEWPAYMENT)
    money = body.get('money')
    uid = body.get('uid')
    cid = body.get('cid')
    chapter = body.get('chapter')
    start_time = int(time.time())
    oid = uuidutils.Gkey()
    prepay_id, sign, random_str = weiXinApi.payment(money, oid, start_time, req)
    session = endpoint_session()
    coins = self.order(session, weiXinApi, None, uid, oid, money, cid,
                       chapter, ext={'prepay_id': prepay_id},
                       order_time=start_time)
    # BUG FIX: the original built dict(prepay_id, time=...) which passes
    # prepay_id positionally and raises TypeError; the key must be named.
    # The result text also wrongly said 'paypal' for a weixin payment.
    return resultutils.results(result='create weixin payment success',
                               data=[dict(oid=oid, coins=coins, money=money,
                                          weixin=dict(prepay_id=prepay_id,
                                                      time=start_time,
                                                      sign=sign,
                                                      random=random_str))])
def create(self, req, body=None):
    """Create a database instance through the selected impl."""
    body = body or {}
    jsonutils.schema_validate(body, self.CREATEDATABASE)
    impl = body.pop('impl')
    dbtype = body.pop('dbtype')
    user = body.pop('user')
    passwd = body.pop('passwd')
    dbversion = body.pop('dbversion', None)
    affinity = body.pop('affinity', 0)
    # A slave may not declare a bond target, and ignores affinity.
    if body.get('slave'):
        if body.get('bond'):
            raise InvalidArgument(
                'Slave database can not bond to another database ')
        affinity = 0
    params = dict(req=req)
    params.update(body)
    manager = utils.impl_cls('wsgi', impl)
    created = manager.create_database(user, passwd, dbtype,
                                      dbversion, affinity, **params)
    return resultutils.results(result='create database success',
                               data=[created])
def upgrade(self, req, group_id, objtype, entity, body=None):
    """Upgrade object files on entities via an async bulk rpc.

    Validates every objfile's subtype and timeout against the request
    window, then extends the rpc deadline to cover a retry.
    """
    body = body or {}
    jsonutils.schema_validate(body, self.UPGRADE)
    objfiles = body.get('objfiles')
    if not objfiles:
        raise InvalidArgument('Not objfile found for upgrade')
    request_time = body.get('request_time')
    finishtime = body.get('finishtime')
    timeline = body.get('timeline') or request_time
    runtime = finishtime - request_time
    allowed = (common.APPFILE, common.DATADB, common.LOGDB)
    for subtype in objfiles:
        if subtype not in allowed:
            raise InvalidArgument('json schema error')
        objfile = objfiles[subtype]
        # each file's own timeout must fit inside the request window
        if objfile.get('timeout') + request_time > finishtime:
            raise InvalidArgument('%s timeout over finishtime' % subtype)
    body.update({'timeline': timeline,
                 'deadline': finishtime + 3 + (runtime * 2)})
    body.setdefault('objtype', objtype)
    return self._async_bluck_rpc('upgrade', group_id, objtype, entity, body)
def index(self, req, body=None):
    """List async requests, optionally filtered by time range and status.

    body may carry 'start'/'end' unix timestamps bounding request_time,
    a 'status' filter, ordering options and 'page_num'.
    """
    body = body or {}
    jsonutils.schema_validate(body, INDEXSCHEMA)
    session = get_session(readonly=True)
    order = body.get('order', None)
    desc = body.get('desc', False)
    status = body.get('status', None)
    page_num = body.pop('page_num', 0)
    filter_list = []
    start = int(body.get('start', 0))
    end = int(body.get('end', 0))
    if start:
        # BUG FIX: the lower bound previously compared against *end*,
        # so the start filter never worked.
        filter_list.append(AsyncRequest.request_time >= start)
    if end:
        if end < start:
            raise InvalidArgument('end time less then start time')
        filter_list.append(AsyncRequest.request_time < end)
    if status is not None:
        filter_list.append(AsyncRequest.status == status)
    request_filter = and_(*filter_list)
    return resultutils.bulk_results(session,
                                    model=AsyncRequest,
                                    columns=[AsyncRequest.request_id,
                                             AsyncRequest.resultcode,
                                             AsyncRequest.status,
                                             AsyncRequest.request_time,
                                             AsyncRequest.finishtime,
                                             AsyncRequest.deadline,
                                             AsyncRequest.expire,
                                             ],
                                    counter=AsyncRequest.request_id,
                                    order=order, desc=desc,
                                    filter=request_filter,
                                    page_num=page_num,
                                    limit=200)
def new(self, req, body=None):
    """Create a new ipay payment order."""
    body = body or {}
    if not isinstance(body, dict):
        raise InvalidArgument(
            'Http body not json or content type is not application/json')
    jsonutils.schema_validate(body, NEWPAYMENT)
    money = body.get('money')
    order_time = int(time.time())
    oid = uuidutils.Gkey()
    transid, url, url_r, url_h = iPayApi.payment(money, oid, req)
    session = endpoint_session()
    coins = self.order(session, iPayApi, transid,
                       body.get('uid'), oid, money,
                       body.get('cid'), body.get('chapter'),
                       order_time=order_time)
    ipay = dict(transid=transid, url=url, url_r=url_r, url_h=url_h)
    return resultutils.results(result='create ipay payment success',
                               data=[dict(ipay=ipay, oid=oid,
                                          coins=coins, money=money)])
def create(self, req, body=None):
    """Create a new comic record together with its storage path."""
    body = body or {}
    jsonutils.schema_validate(body, NEWCOMIC)
    session = endpoint_session()
    comic = Comic(name=body.get('name'),
                  type=body.get('type'),
                  author=body.get('author'),
                  region=body.get('region'),
                  ext=body.get('ext', 'webp'))
    # The path preparer is only confirmed once the row has a cid.
    with _prepare_comic_path() as prepare:
        with session.begin():
            session.add(comic)
            session.flush()
            prepare.ok(comic.cid)
            LOG.info('Create comic success')
    return resultutils.results(result='create comic success',
                               data=[dict(cid=comic.cid, name=comic.name)])
def flushconfig(self, req, group_id, objtype, entity, body=None):
    """Flush entity configuration via an async bulk rpc.

    For GAMESERVER, optional gm/cross entity ids in the body are
    resolved to their ports and local ip and injected as 'chiefs'.
    """
    body = body or {}
    group_id = int(group_id)
    jsonutils.schema_validate(body, self.FLUSH)
    if objtype == common.GAMESERVER:
        gm = body.pop(common.GMSERVER, 0)
        cross = body.pop(common.CROSSSERVER, 0)
        entitys = []
        if gm:
            entitys.append(gm)
        if cross:
            entitys.append(cross)
        entitys = list(set(entitys))
        if entitys:
            chiefs = {}
            session = endpoint_session()
            query = model_query(session, AppEntity,
                                filter=and_(AppEntity.group_id == group_id,
                                            AppEntity.entity.in_(entitys)))
            gmsvr = crosssvr = None
            # Verify the requested ids exist in this group and have the
            # expected objtype.
            for appserver in query:
                if appserver.group_id != group_id:
                    raise InvalidArgument('Entity group value error')
                if appserver.objtype == common.GMSERVER:
                    if appserver.entity != gm:
                        raise InvalidArgument('Find %s but entity is %d' % (common.GMSERVER, gm))
                    gmsvr = appserver
                elif appserver.objtype == common.CROSSSERVER:
                    if appserver.entity != cross:
                        raise InvalidArgument('Find %s but entity is %d' % (common.CROSSSERVER, cross))
                    crosssvr = appserver
            if gm and not gmsvr:
                raise InvalidArgument('%s.%d can not be found' % (common.GMSERVER, gm))
            if cross and not crosssvr:
                raise InvalidArgument('%s.%d can not be found' % (common.CROSSSERVER, cross))
            # Fetch server info (ports/ip) for the chief entities.
            maps = entity_controller.shows(endpoint=common.NAME, entitys=entitys)
            if gmsvr:
                chiefs.setdefault(
                    common.GMSERVER,
                    dict(entity=gmsvr.entity,
                         ports=maps.get(gmsvr.entity).get('ports'),
                         local_ip=maps.get(
                             gmsvr.entity).get('metadata').get('local_ip')))
            if crosssvr:
                chiefs.setdefault(
                    common.CROSSSERVER,
                    dict(entity=crosssvr.entity,
                         ports=maps.get(crosssvr.entity).get('ports'),
                         local_ip=maps.get(crosssvr.entity).get(
                             'metadata').get('local_ip')))
            body.update({'chiefs': chiefs})
    return self._async_bluck_rpc('flushconfig', group_id, objtype, entity, body)
def _download(self, md5, timeout):
    """Download (or reuse) the file identified by *md5*.

    Returns the event.Event callers wait on.  If a download for this
    md5 is already in flight, its existing event is returned; otherwise
    a new event is registered and the actual work runs in the
    threadpool, sending the result (or exception) through the event.
    """
    if not attributes.is_md5_like(md5):
        raise ValueError('%s is not md5, can not download' % md5)
    try:
        # EAFP: another caller already started this download.
        return self.downloading[md5]
    except KeyError:
        ev = event.Event()
        self.downloading[md5] = ev
        fileinfo = self.infoget(md5)
        LOG.debug('Try download file of %s', str(fileinfo))
        jsonutils.schema_validate(fileinfo, FileManager.SCHEMA)
        if md5 != fileinfo['md5']:
            # Metadata mismatch: unregister, fail waiters, then raise.
            self.downloading.pop(md5, None)
            ev.send(exc=exceptions.FileManagerError('Md5 not the same!'))
            raise exceptions.FileManagerError('Md5 not the same!')

        def __download():
            # Runs in the threadpool; every failure path must both
            # unregister from self.downloading and signal the event.
            address = fileinfo['address']
            filename = fileinfo['md5'] + os.extsep + fileinfo['ext']
            path = os.path.join(self.path, filename)
            if os.path.exists(path):
                # File already on disk: just verify it.
                LOG.info('Output file %s alreday exist' % path)
                try:
                    _md5 = digestutils.filemd5(path)
                    size = os.path.getsize(path)
                except (OSError, IOError) as e:
                    LOG.error('Download get size,md5 fail')
                    ev.send(exc=e)
                    self.downloading.pop(fileinfo['md5'], None)
                    raise e
            else:
                # default downloader http
                _downloader = downloader_factory(fileinfo.get('downloader', 'http'),
                                                 fileinfo.get('adapter_args', []))
                LOG.info('Download %s with %s' % (address, _downloader.__class__.__name__))
                try:
                    _md5 = _downloader.download(address, path, timeout)
                    size = os.path.getsize(path)
                    LOG.info('Download file %s success, wirte to local database' % fileinfo['md5'])
                except Exception as e:
                    # Remove the partial file before failing waiters.
                    if os.path.exists(path):
                        try:
                            os.remove(path)
                        except (OSError, IOError):
                            LOG.error('Download fail, remove path %s fail' % path)
                    self.downloading.pop(fileinfo['md5'], None)
                    ev.send(exc=e)
                    raise e
            try:
                # Verify integrity against the declared md5/size.
                if _md5 != fileinfo['md5'] or size != fileinfo['size']:
                    if os.path.exists(path):
                        try:
                            os.remove(path)
                        except (OSError, IOError):
                            LOG.error('Download fail, remove path %s fail' % path)
                    raise exceptions.FileNotMatch('File md5 or size not the same')
                # write into database
                uploadtime = fileinfo.get('uploadtime')
                if uploadtime:
                    uploadtime = datetime.datetime.strptime(uploadtime, '%Y-%m-%d %H:%M:%S')
                try:
                    localfile = self.localmd5[md5]
                except KeyError:
                    # First time we see this file: persist its detail row
                    # and register it in the in-memory indexes.
                    file_detil = models.FileDetail(md5=md5, size=size, ext=fileinfo['ext'],
                                                   desc=fileinfo.get('desc', 'unkonwn file'),
                                                   address=fileinfo['address'],
                                                   uploadtime=uploadtime)
                    self.session.add(file_detil)
                    self.session.flush()
                    localfile = LocalFile(path, md5, size)
                    self.localpath[path] = localfile
                    self.localmd5[md5] = localfile
                if localfile.size != size:
                    try:
                        os.remove(path)
                    except (OSError, IOError):
                        LOG.error('Download file size not match')
                    raise exceptions.FileManagerError('Size not match')
                self.downloading.pop(md5, None)
                ev.send(result=None)
            except Exception as e:
                self.downloading.pop(md5, None)
                ev.send(exc=e)
                raise
        self.threadpool.add_thread_n(__download)
        return ev
def create(self, req, group_id, objtype, body=None):
    """Create a new app entity (gm/cross/game server) in a group.

    Selects an agent and databases, resolves chief servers (gm/cross)
    for gameservers, creates the entity over rpc inside the group lock,
    and persists the entity (plus game area and package bindings for
    gameservers).
    """
    body = body or {}
    group_id = int(group_id)
    jsonutils.schema_validate(body, self.CREATEAPPENTITY)
    if body.get('packages'):
        raise InvalidArgument('Package parameter is removed')
    # cross server to use, gameserver only
    cross_id = body.pop('cross_id', None)
    # open time, gameserver only
    opentime = body.pop('opentime', None)
    # area display id, gameserver only
    show_id = body.pop('show_id', None)
    # area display name, gameserver only
    areaname = body.pop('areaname', None)
    # platform type
    platform = body.get('platform')
    include = set(body.pop('include', []))
    exclude = set(body.pop('exclude', []))
    if include and exclude:
        raise InvalidArgument('Both packages and exclude is forbidden')
    packages = []
    session = endpoint_session()
    if objtype == common.GAMESERVER:
        platform = common.PlatformTypeMap.get(platform)
        if not areaname or not opentime or not platform or not show_id:
            raise InvalidArgument('%s need opentime and areaname and platform and show_id' % objtype)
    # install file info
    appfile = body.pop(common.APPFILE)
    LOG.debug('Try find agent and database for entity')
    # pick the agent the entity will run on
    agent_id = body.get('agent_id') or self._agentselect(req, objtype, **body)
    # pick the databases the entity will use
    databases = self._dbselect(req, objtype, **body)
    # verify the selected databases cover the required subtypes
    if not self._validate_databases(objtype, databases):
        raise InvalidArgument('Miss some database')
    LOG.info('Find agent and database for entity success, to agent %d, to databse %s' % (agent_id, str(databases)))
    query = model_query(session, Group, filter=Group.group_id == group_id)
    joins = joinedload(Group.entitys, innerjoin=False)
    joins = joins.joinedload(AppEntity.databases, innerjoin=False)
    query = query.options(joins)
    _group = query.one()
    glock = get_gamelock()
    # hold the group lock for the whole create to avoid racing siblings
    with glock.grouplock(group_id):
        if objtype == common.GAMESERVER:
            # resolve the channel packages the new area belongs to
            _pquery = model_query(session, Package, filter=Package.group_id == group_id)
            _packages = set([p.package_id for p in _pquery.all() if p.platform & platform])
            if (include - _packages) or (exclude - _packages):
                raise InvalidArgument('Package can not be found in include or exclude')
            if exclude:
                packages = _packages - exclude
            elif include:
                packages = include
            else:
                packages = _packages
        typemap = {}
        for _entity in _group.entitys:
            # skip entities that are not active
            if _entity.status != common.OK:
                continue
            try:
                typemap[_entity.objtype].append(_entity)
            except KeyError:
                typemap[_entity.objtype] = [_entity, ]
        # chief servers info (gm/cross), filled for gameservers
        chiefs = None
        # entities in the group with the same objtype
        same_type_entitys = typemap.get(objtype, [])
        if objtype == common.GMSERVER:
            # a group may hold at most one GM server
            for _entity in _group.entitys:
                if _entity.objtype == common.GMSERVER:
                    return resultutils.results(result='create entity fail, %s duplicate in group' % objtype,
                                               resultcode=manager_common.RESULT_ERROR)
        else:
            # non-gm entities need the group's gm server first
            try:
                gm = typemap[common.GMSERVER][0]
                if gm.status <= common.DELETED:
                    return resultutils.results(result='Create entity fail, gm mark deleted',
                                               resultcode=manager_common.RESULT_ERROR)
            except KeyError as e:
                return resultutils.results(result='Create entity fail, can not find GMSERVER: %s' % e.message,
                                           resultcode=manager_common.RESULT_ERROR)
            if objtype == common.GAMESERVER:
                # area name must be unique inside the group
                if model_count_with_key(session, GameArea,
                                        filter=and_(GameArea.group_id == group_id,
                                                    GameArea.areaname == areaname)):
                    return resultutils.results(result='Create entity fail, name exist',
                                               resultcode=manager_common.RESULT_ERROR)
                cross = None
                # a gameserver also needs a cross server in the group
                try:
                    crossservers = typemap[common.CROSSSERVER]
                except KeyError as e:
                    return resultutils.results(result='create entity fail, can not find my chief: %s' % e.message,
                                               resultcode=manager_common.RESULT_ERROR)
                # an explicit cross entity id was requested
                if cross_id:
                    # it must belong to this group
                    for _cross in crossservers:
                        if cross_id == _cross.entity:
                            cross = _cross
                            break
                else:
                    # first gameserver in the group: take the first cross
                    if not same_type_entitys:
                        cross = crossservers[0]
                    else:
                        # otherwise pick the least-referenced cross server
                        counted = set()
                        counter = dict()
                        for _cross in crossservers:
                            counter.setdefault(_cross.entity, 0)
                        # count cross_id references over the group's gameservers
                        for _entity in _group.entitys:
                            if _entity.objtype != common.GAMESERVER:
                                continue
                            if _entity.cross_id in counted:
                                continue
                            counter[_entity.cross_id] += 1
                        # smallest reference count wins
                        cross_id = sorted(zip(counter.itervalues(), counter.iterkeys()))[0][1]
                        for _cross in crossservers:
                            if cross_id == _cross.entity:
                                cross = _cross
                                break
                if not cross:
                    raise InvalidArgument('cross server can not be found or not active')
                # fetch server info (ports/ip) for gm and cross
                maps = entity_controller.shows(endpoint=common.NAME, entitys=[gm.entity, cross.entity])
                for v in six.itervalues(maps):
                    if v is None:
                        raise InvalidArgument('Get chiefs info error, agent not online?')
                chiefs = dict()
                # build the chiefs payload from cross and gm
                for chief in (cross, gm):
                    chiefmetadata = maps.get(chief.entity).get('metadata')
                    ports = maps.get(chief.entity).get('ports')
                    if not chiefmetadata:
                        raise InvalidArgument('%s.%d is offline' % (chief.objtype, chief.entity))
                    need = common.POSTS_COUNT[chief.objtype]
                    if need and len(ports) != need:
                        raise InvalidArgument('%s.%d port count error, '
                                              'find %d, need %d' % (chief.objtype, chief.entity,
                                                                    len(ports), need))
                    chiefs.setdefault(chief.objtype,
                                      dict(entity=chief.entity,
                                           ports=ports,
                                           local_ip=chiefmetadata.get('local_ip')))
                cross_id = cross.entity
        # full rpc payload for the agent-side create
        create_body = dict(objtype=objtype,
                           appfile=appfile,
                           databases=databases,
                           chiefs=chiefs,
                           entity=int(body.get('entity', 0)))
        with session.begin():
            body.setdefault('finishtime', rpcfinishtime()[0] + 5)
            try:
                create_result = entity_controller.create(req=req, agent_id=agent_id,
                                                         endpoint=common.NAME,
                                                         body=create_body)['data'][0]
            except RpcResultError as e:
                LOG.error('Create entity rpc call fail: %s' % e.message)
                raise InvalidArgument(e.message)
            entity = create_result.get('entity')
            rpc_result = create_result.get('notify')
            LOG.info('Create new entity %d' % entity)
            LOG.debug('Entity controller create rpc result %s', str(rpc_result))
            # persist the entity row
            appentity = AppEntity(entity=entity, agent_id=agent_id,
                                  group_id=group_id, objtype=objtype,
                                  cross_id=cross_id,
                                  opentime=opentime,
                                  platform=platform)
            session.add(appentity)
            session.flush()
            if objtype == common.GAMESERVER:
                areaname = areaname.decode('utf-8') if isinstance(areaname, six.binary_type) else areaname
                gamearea = GameArea(group_id=_group.group_id,
                                    show_id=show_id,
                                    areaname=areaname,
                                    gid=None,
                                    entity=appentity.entity)
                session.add(gamearea)
                session.flush()
                # bind the new area id into the channel packages, in bulk
                if packages:
                    for package_id in packages:
                        session.add(PackageArea(package_id=package_id,
                                                area_id=gamearea.area_id))
                    session.flush()
            # persist the entity's database bindings
            if rpc_result.get('databases'):
                self._bondto(session, entity, rpc_result.get('databases'))
            else:
                LOG.error('New entity database miss')
        _result = dict(entity=entity,
                       objtype=objtype,
                       agent_id=agent_id,
                       connection=rpc_result.get('connection'),
                       ports=rpc_result.get('ports'),
                       databases=rpc_result.get('databases'))
        areas = []
        if objtype == common.GAMESERVER:
            areas = [dict(area_id=gamearea.area_id, gid=0,
                          areaname=areaname, show_id=show_id)]
        _result.setdefault('areas', areas)
        _result.setdefault('cross_id', cross_id)
        _result.setdefault('opentime', opentime)
        _result.setdefault('platform', platform)
        _result.setdefault('packages', sorted(packages))
        # register the entity's ports
        # threadpool.add_thread(port_controller.unsafe_create,
        #                       agent_id, common.NAME, entity, rpc_result.get('ports'))
        port_controller.unsafe_create(agent_id, common.NAME, entity,
                                      rpc_result.get('ports'))
        # post-create notification to the agent, off-thread
        threadpool.add_thread(entity_controller.post_create_entity,
                              entity, common.NAME, objtype=objtype,
                              status=common.UNACTIVE,
                              opentime=opentime,
                              group_id=group_id, areas=areas)
        return resultutils.results(result='create %s entity success' % objtype,
                                   data=[_result, ])
def new(self, req, cid, chapter, body=None):
    """Add a new chapter to a comic.

    Supports two upload impl types: 'websocket' (a receiver process is
    launched and conversion happens in its exit callback) and 'local'
    (a directory already on disk, converted in a green thread).  The
    comic row is locked while the chapter counter is bumped.
    """
    cid = int(cid)
    chapter = int(chapter)
    body = body or {}
    jsonutils.schema_validate(body, NEWCHAPTER)
    impl = body.get('impl')
    timeout = body.get('timeout')
    strict = body.get('strict', True)
    logfile = os.path.join(self.logdir, '%d.chapter.%d.%d.log' % (int(time.time()), cid, chapter))
    comic_path = self.comic_path(cid)
    # random key used to obfuscate the chapter resource urls
    key = ''.join(random.sample(string.lowercase, 6))
    # NOTE: ext is rebound to comic.ext later, inside the transaction;
    # the closures below read the rebound value (late binding).
    ext = ''
    if impl['type'] == 'websocket':
        tmpfile = 'chapter.%d.uploading' % int(time.time())
        fileinfo = impl.get('fileinfo')
        fileinfo.update({'overwrite': tmpfile})
        tmpfile = os.path.join(comic_path, tmpfile)
        if os.path.exists(tmpfile):
            raise exceptions.ComicUploadError('Upload chapter file fail')
        try:
            port = WSPORTS.pop()
        except KeyError:
            raise InvalidArgument('Too many websocket process')

        def _websocket_func():
            # receiver exit callback: give the port back, then convert
            WSPORTS.add(port)
            LOG.info('Try convert new chapter %d.%d from file:%s, type:%s' % (cid, chapter, tmpfile, ext))
            # checket chapter file
            try:
                count = self._convert_new_chapter(tmpfile, cid, ext, chapter, key, logfile, strict)
            except Exception as e:
                LOG.error('convert new chapter from websocket upload file fail')
                self._unfinish(cid, chapter)
                try:
                    if os.path.exists(tmpfile):
                        os.remove(tmpfile)
                except (OSError, IOError):
                    LOG.error('Revmove websocket uploade file %s fail' % tmpfile)
                raise e
            else:
                # NOTE(review): '_finishe' here vs '_finish' in the local
                # branch — one of the two is likely a typo; confirm which
                # helper actually exists.
                self._finishe(cid, chapter, dict(max=count, key=key))
    elif impl['type'] == 'local':
        path = impl['path']
        # restrict the path to a plain subdirectory of tmpdir
        if '.' in path:
            raise InvalidArgument('Dot is not allow in path')
        if path.startswith('/'):
            raise InvalidArgument('start with / is not allow')
        path = os.path.join(self.tmpdir, path)
        if not os.path.exists(path) or not os.path.isdir(path):
            raise InvalidArgument('Target path %s not exist' % path)

        def _local_func():
            LOG.info('Try convert new chapter %d.%d from path:%s, type:%s' % (cid, chapter, path, ext))
            try:
                count = self._convert_new_chapter(path, cid, ext, chapter, key, logfile, strict)
            except Exception as e:
                LOG.error('convert new chapter from local dir %s fail, %s' % (path, e.__class__.__name__))
                if LOG.isEnabledFor(logging.DEBUG):
                    LOG.exception(e.message)
                self._unfinish(cid, chapter)
                raise
            else:
                self._finish(cid, chapter, dict(max=count, key=key))
    else:
        raise NotImplementedError
    session = endpoint_session()
    # lock the comic row while bumping the chapter counter
    query = session.query(Comic).filter(Comic.cid == cid).with_for_update()
    worker = None
    with _prepare_chapter_path(cid, chapter):
        with session.begin():
            comic = query.one()
            LOG.info('Crate New chapter of %d' % cid)
            last = comic.last
            # chapters must be added strictly in sequence
            if (last + 1) != chapter:
                raise InvalidArgument('New chapter value error')
            chapters = msgpack.unpackb(comic.chapters)
            # a pending upload leaves len(chapters) behind comic.last
            if len(chapters) != comic.last:
                LOG.error('Comic chapter is uploading')
                raise InvalidArgument('Comic chapter is uploading')
            comic.last = chapter
            session.flush()
            ext = comic.ext
            # NOTE: the operations below hold the comic row lock for a
            # comparatively long time.
            if impl['type'] == 'websocket':
                ws = LaunchRecverWebsocket(WEBSOCKETPROC)
                try:
                    uri = ws.upload(user=CF.user, group=CF.group,
                                    ipaddr=CF.ipaddr, port=port,
                                    rootpath=comic_path,
                                    fileinfo=impl['fileinfo'],
                                    logfile=logfile, timeout=timeout)
                except Exception:
                    # launch failed: return the reserved port
                    WSPORTS.add(port)
                    return resultutils.results(result='upload cover get websocket uri fail',
                                               resultcode=manager_common.RESULT_ERROR)
                else:
                    ws.asyncwait(exitfunc=_websocket_func)
                    worker = uri
                    LOG.info('New chapter from websocket port %d' % port)
            elif impl['type'] == 'local':
                LOG.info('New chapter from local path %s, spawning' % path)
                eventlet.spawn(_local_func)
            else:
                raise NotImplementedError
    return resultutils.results(result='new chapter spawning',
                               data=[dict(cid=comic.cid, name=comic.name, worker=worker)])