def _handle_fault_response(self, status_code, response_body, resp):
    """Map an HTTP fault response onto the matching client exception.

    Always raises; the exception type depends on the status code class.
    """
    # Prefer the server-supplied 'msg' field, fall back to the raw body.
    try:
        payload = jsonutils.loads_as_bytes(response_body)
        reason = payload.get('msg') or response_body
    except ValueError:
        # Body is not decodable at all -- use it verbatim.
        reason = response_body
    except Exception:
        LOG.exception('Resopne error')
        raise
    if 400 <= status_code < 500:
        LOG.debug('Http request get client error: %s' % str(reason))
        raise exceptions.ClientRequestError(code=status_code, resone=reason)
    if 500 <= status_code < 600:
        LOG.debug('Http request get server error: %s' % str(reason))
        if status_code == 501:
            raise exceptions.ServerNotImplementedError(resone=reason)
        raise exceptions.ServerInternalError(code=status_code, resone=reason)
    LOG.error('Http request unknown error: %s' % str(reason))
    raise exceptions.ServerRsopneCodeError(code=status_code, resone=reason)
def orders(self, req, uid, body=None):
    """List all orders of a user, newest first."""
    uid = int(uid)
    session = endpoint_session(readonly=True)
    query = model_query(session, Order, filter=Order.uid == uid)
    query = query.order_by(Order.oid.desc())

    def _serialize(order):
        # Flatten an Order row; 'ext' is stored serialized in the column.
        return dict(oid=order.oid, sandbox=order.sandbox, uid=order.uid,
                    coins=order.coins, gifts=order.gifts, coin=order.coin,
                    gift=order.gift, money=order.money,
                    platform=order.platform, serial=order.serial,
                    time=order.time, cid=order.cid, chapter=order.chapter,
                    ext=jsonutils.loads_as_bytes(order.ext) if order.ext else None)

    return resultutils.results(result='show order of user success',
                               data=[_serialize(order) for order in query])
def deserialize(self, data, status_code):
    """Deserialize a response body produced by `results`.

    A 204 (No Content) response is returned untouched; anything else is
    decoded with jsonutils and returned as a dict.
    """
    return data if status_code == 204 else jsonutils.loads_as_bytes(data)
def show(self, req, oid, body=None):
    """Show the details of one order."""
    body = body or {}
    oid = int(oid)
    session = endpoint_session(readonly=True)
    query = model_query(session, Order, filter=Order.oid == oid)
    order = query.one()
    # Flatten the Order row; 'ext' is stored serialized in the column.
    serialized = dict(oid=order.oid, sandbox=order.sandbox, uid=order.uid,
                      coins=order.coins, gifts=order.gifts, coin=order.coin,
                      gift=order.gift, money=order.money,
                      platform=order.platform, serial=order.serial,
                      time=order.time, cid=order.cid, chapter=order.chapter,
                      ext=jsonutils.loads_as_bytes(order.ext) if order.ext else None)
    return resultutils.results(result='show order success',
                               data=[serialized])
def show(self, req, oid, body=None):
    """Show the details of one completed (recharge-logged) order."""
    body = body or {}
    oid = int(oid)
    session = endpoint_session(readonly=True)
    query = model_query(session, RechargeLog, filter=RechargeLog.oid == oid)
    relog = query.one()
    # Flatten the RechargeLog row; 'ext' is stored serialized in the column.
    serialized = dict(oid=relog.oid, sandbox=relog.sandbox, uid=relog.uid,
                      coins=relog.coins, gifts=relog.gifts, coin=relog.coin,
                      gift=relog.gift, money=relog.money,
                      platform=relog.platform, serial=relog.serial,
                      time=relog.time, cid=relog.cid, chapter=relog.chapter,
                      ext=jsonutils.loads_as_bytes(relog.ext) if relog.ext else None)
    return resultutils.results(result='show recharge log success',
                               data=[serialized])
def recharges(self, req, uid, body=None):
    """List a user's recharge log entries, newest first."""
    body = body or {}
    uid = int(uid)
    session = endpoint_session(readonly=True)
    query = model_query(session, RechargeLog, filter=RechargeLog.uid == uid)
    query = query.order_by(RechargeLog.oid.desc())

    def _serialize(relog):
        # Flatten a RechargeLog row; 'ext' is stored serialized.
        return dict(oid=relog.oid, sandbox=relog.sandbox, uid=relog.uid,
                    coins=relog.coins, gifts=relog.gifts, coin=relog.coin,
                    gift=relog.gift, money=relog.money,
                    platform=relog.platform, serial=relog.serial,
                    time=relog.time, cid=relog.cid, chapter=relog.chapter,
                    ext=jsonutils.loads_as_bytes(relog.ext) if relog.ext else None)

    return resultutils.results(result='show user recharge log success',
                               data=[_serialize(relog) for relog in query])
def unquote_version(self, req, group_id, objtype, entity, body=None):
    """Remove the resource-version quote a package holds on a gameserver entity.

    Only valid for GAMESERVER entities. Deletes the CDN quote record and
    drops the package entry from the entity's serialized `versions` map.
    Returns the removed version/quote_id (None values when nothing matched).
    """
    body = body or {}
    if objtype != common.GAMESERVER:
        raise InvalidArgument('Version unquote just for %s' % common.GAMESERVER)
    package_id = int(body.get('package_id'))
    group_id = int(group_id)
    entity = int(entity)
    session = endpoint_session()
    query = model_query(session, AppEntity, filter=AppEntity.entity == entity)
    quote = None
    with session.begin():
        _entity = query.one()
        # Guard: entity must match the requested objtype/group.
        if _entity.objtype != objtype:
            raise InvalidArgument('Objtype not match')
        if _entity.group_id != group_id:
            raise InvalidArgument('Group id not match')
        # `versions` column is serialized {package_id(str): {version, quote_id}}.
        versions = jsonutils.loads_as_bytes(_entity.versions) if _entity.versions else {}
        str_key = str(package_id)
        if str_key in versions:
            quote = versions.pop(str_key)
            # Drop the CDN quote record before persisting the shrunk map.
            cdnquote_controller.delete(req, quote.get('quote_id'))
            # Store None rather than an empty dict when no quotes remain.
            _entity.versions = jsonutils.dumps(versions) if versions else None
            session.flush()
    return resultutils.results(result='%s entity version unquote success' % objtype,
                               data=[dict(version=quote.get('version') if quote else None,
                                          quote_id=quote.get('quote_id') if quote else None)])
def show(self, req, group_id, objtype, entity, body=None):
    """Show one entity with its areas, databases and agent metadata.

    ``body['format']`` selects the database payload shape: 'list' (default)
    returns a list of db dicts, anything else keys them by subtype.
    """
    body = body or {}
    group_id = int(group_id)
    entity = int(entity)
    session = endpoint_session(readonly=True)
    _format = body.get('format') or 'list'
    query = model_query(session, AppEntity, filter=AppEntity.entity == entity)
    query = query.options(joinedload(AppEntity.databases, innerjoin=False))
    _entity = query.one()
    # Guard: entity must match requested objtype/group.
    if _entity.objtype != objtype:
        raise InvalidArgument('Entity is not %s' % objtype)
    if _entity.group_id != group_id:
        raise InvalidArgument('Entity group %d not match %d' % (_entity.group_id, group_id))
    # Agent-side info (ports/metadata) comes from the entity controller.
    metadata, ports = self._entityinfo(req, entity)
    if _format == 'list':
        databases = []
    else:
        databases = {}
    for database in _entity.databases:
        dbinfo = dict(quote_id=database.quote_id,
                      database_id=database.database_id,
                      host=database.host,
                      port=database.port,
                      ro_user=database.ro_user,
                      ro_passwd=database.ro_passwd,
                      subtype=database.subtype,
                      # schema name convention: <name>_<objtype>_<subtype>_<entity>
                      schema='%s_%s_%s_%d' % (common.NAME, objtype, database.subtype, entity))
        if _format == 'list':
            databases.append(dbinfo)
        else:
            databases[database.subtype] = dbinfo
    return resultutils.results(
        result='show %s areas success' % objtype,
        data=[dict(entity=_entity.entity,
                   agent_id=_entity.agent_id,
                   objtype=objtype,
                   group_id=_entity.group_id,
                   opentime=_entity.opentime,
                   platform=_entity.platform,
                   status=_entity.status,
                   versions=jsonutils.loads_as_bytes(_entity.versions) if _entity.versions else None,
                   areas=[dict(area_id=area.area_id,
                               gid=0,
                               show_id=area.show_id,
                               # areaname is unicode in the DB; callers expect bytes
                               areaname=area.areaname.encode('utf-8'),
                               ) for area in _entity.areas],
                   databases=databases,
                   metadata=metadata,
                   ports=ports)])
def from_dict(cls, data):
    """Rebuild a failure object from its dictionary form.

    Accepts either a dict or a serialized string of one; nested causes
    are reconstructed recursively.
    """
    if isinstance(data, basestring):
        data = jsonutils.loads_as_bytes(data)
    # Refuse payloads serialized under a different schema version.
    version = data.pop('version', None)
    if version != cls.DICT_VERSION:
        raise ValueError('Invalid dict version of failure object: %r' % version)
    causes = data.get('causes')
    if causes is not None:
        data['causes'] = tuple(map(cls.from_dict, causes))
    return cls(**data)
def quote_version(self, req, group_id, objtype, entity, body=None):
    """Pin a gameserver entity to a specific resource version for a package.

    Creates (or updates) a CDN version quote for the package's resource and
    records it in the entity's serialized `versions` map, keyed by package id.
    """
    body = body or {}
    if objtype != common.GAMESERVER:
        raise InvalidArgument('Version quote just for %s' % common.GAMESERVER)
    package_id = int(body.get('package_id'))
    rversion = body.get('rversion')
    group_id = int(group_id)
    entity = int(entity)
    session = endpoint_session()
    # Resolve the resource id of the requested package within the group.
    query = model_query(session, Group, filter=Group.group_id == group_id)
    query = query.options(joinedload(Group.packages, innerjoin=False))
    group = query.one()
    resource_id = None
    for package in group.packages:
        if package.package_id == package_id:
            resource_id = package.resource_id
    if not resource_id:
        raise InvalidArgument('Entity can not find package or package resource is None')
    query = model_query(session, AppEntity, filter=AppEntity.entity == entity)
    query = query.options(joinedload(AppEntity.areas, innerjoin=False))
    with session.begin():
        _entity = query.one()
        if _entity.objtype != objtype:
            raise InvalidArgument('Objtype not match')
        if _entity.group_id != group_id:
            raise InvalidArgument('Group id not match')
        # The entity must own at least one area served by this package.
        if not model_count_with_key(session, PackageArea.package_id,
                                    filter=and_(PackageArea.package_id == package_id,
                                                PackageArea.area_id.in_([area.area_id
                                                                         for area in _entity.areas]))):
            raise InvalidArgument('Entity area not in package areas')
        # `versions` column is serialized {package_id(str): {version, quote_id}}.
        versions = jsonutils.loads_as_bytes(_entity.versions) if _entity.versions else {}
        str_key = str(package_id)
        if str_key in versions:
            # A quote already exists: only touch it when the version differs.
            quote = versions.get(str_key)
            if quote.get('version') != rversion:
                body = {'version': rversion}
                quote.update(body)
                cdnquote_controller.update(req, quote.get('quote_id'), body=body)
        else:
            # No quote yet: create one against the CDN resource.
            qresult = cdnresource_controller.vquote(req, resource_id,
                                                    body={'version': rversion,
                                                          'desc': '%s.%d' % (common.NAME, entity)})
            quote = qresult['data'][0]
            quote = dict(version=rversion, quote_id=quote.get('quote_id'))
            versions.setdefault(str_key, quote)
        _entity.versions = jsonutils.dumps(versions)
        session.flush()
    return resultutils.results(result='set entity version quote success',
                               data=[dict(resource_id=resource_id,
                                          version=rversion,
                                          quote_id=quote.get('quote_id'))])
def agents_metadata(self, agents):
    """Return cached metadata for *agents*, refreshing stale entries.

    The zset self.ALL_AGENTS_KEY maps agent_id -> baseline score; a cache
    entry is re-fetched when its recorded baseline differs or its local
    expiry has passed. Raises InvalidArgument for unknown agent ids.
    """
    client = self.client
    key = self.ALL_AGENTS_KEY
    # Full zset scan: member is the agent id, score is its baseline.
    all_ids = client.zrange(name=key, start=0, end=-1, withscores=True)
    zsources = dict()
    for zsource in all_ids:
        zsources[int(zsource[0])] = int(zsource[1])
    agents = set(agents)
    if agents - set(zsources.keys()):
        not_founds = [str(a) for a in (agents - set(zsources.keys()))]
        LOG.error('Agents of [%s] can not be found' % ','.join(not_founds))
        raise InvalidArgument('Agents Can not find be found in %s' % self.ALL_AGENTS_KEY)
    now = int(time.time())
    cached = set(self.metadatas.keys())
    # agent has been deleted
    deleted = cached - agents
    if deleted:
        for agent_id in deleted:
            # remove target from metadata cache and cached list
            self.metatimes.pop(agent_id, None)
            self.metadatas.pop(agent_id, None)
            cached.remove(agent_id)
    # agent not in cache
    missed = list(agents - cached)
    for agent_id in cached:
        # cache overtime or baseline changed
        if self.metatimes[agent_id][0] != zsources[agent_id] \
                or self.metatimes[agent_id][1] < now:
            missed.append(agent_id)
    if missed:
        # Batch fetch: one MGET for the values, then one TTL per key.
        pipe = self.client.pipeline()
        pipe.mget(*[self.host_online_key(agent_id) for agent_id in missed])
        for agent_id in missed:
            pipe.ttl(self.host_online_key(agent_id))
        ttls = pipe.execute()
        # First pipeline result is the MGET list; the rest are TTLs.
        metadatas = ttls.pop(0)
        # metadatas = pipe.mget(*[self.host_online_key(agent_id) for agent_id in missed])
        for index, metadata in enumerate(metadatas):
            agent_id = missed[index]
            if metadata:
                # Record (baseline, local expiry) alongside the decoded value.
                self.metatimes[agent_id] = (zsources[agent_id], now + ttls[index])
                self.metadatas[agent_id] = jsonutils.loads_as_bytes(metadata)
            else:
                # Key vanished from the store: purge the local cache too.
                self.metatimes.pop(agent_id, None)
                self.metadatas.pop(agent_id, None)
    return self.metadatas
def decode_msgpack(raw_data, root_types=(dict,)):
    """Decode a serialized blob and validate its root type.

    NOTE(review): despite the name and error message, decoding is
    delegated to jsonutils.loads_as_bytes here, not a msgpack codec.
    The decoded object's root type must be one of *root_types*
    (a dict by default).
    """
    try:
        decoded = jsonutils.loads_as_bytes(raw_data)
    except Exception as e:
        raise ValueError("Expected msgpack decodable data: %s" % e)
    return _check_decoded_type(decoded, root_types=root_types)
def _fetch_token_from_cache(self, token_id): cache_store = api.get_cache() # 从cache存储中获取token以及ttl pipe = cache_store.pipeline() pipe.multi() pipe.get(token_id) pipe.ttl(token_id) token, ttl = pipe.execute() # 过期时间小于15s, 认为已经过期 if not token or ttl < 15: raise exceptions.TokenExpiredError( 'Token has been expired drop from cache') token = jsonutils.loads_as_bytes(token) return token
def delete(self, req, token_id, checker=None):
    """Invalidate a token and return its decoded payload (may be None)."""
    if self._is_fernet(req):
        # Fernet tokens are self-contained; nothing stored server-side.
        token = self.fernet_formatter.unpack(token_id)
        if checker:
            checker(token)
        return token
    if not token_id.startswith(self.AUTH_PREFIX):
        raise InvalidArgument('Token id prefix error')
    cache_store = api.get_cache()
    token = cache_store.get(token_id)
    if token:
        token = jsonutils.loads_as_bytes(token)
        if checker:
            checker(token)
    # Always remove the cache entry, even when it failed to decode/miss.
    cache_store.delete(token_id)
    return token
def _execute(self, paypal, money):
    """Execute an approved PayPal payment and return the decoded reply."""
    # Convert to the gateway currency using the configured exchange rate.
    total = '%.2f' % (money * self.roe)
    url = '%s/v1/payments/payment/%s/execute' % (self.api, paypal.get('paymentID'))
    payload = dict(payer_id=paypal.get('payerID'),
                   transactions=[
                       dict(amount=dict(total=total, currency=self.currency))
                   ])
    resp = self.session.post(url, auth=self.auth, json=payload,
                             headers={"Content-Type": "application/json"},
                             timeout=10)
    if LOG.isEnabledFor(logging.DEBUG):
        LOG.debug(resp.text)
    return jsonutils.loads_as_bytes(resp.text)
def show(self, req, key, body=None):
    """Fetch a cached value by *key* and wrap it in a result payload.

    Raises InvalidArgument when the key lacks the required prefix or
    contains a wildcard; returns an error result when the key is missing
    or expired.
    """
    if not key.startswith('-'.join([self.PREFIX, 'caches'])):
        raise InvalidArgument('Key prefix not match')
    if '*' in key:
        raise InvalidArgument('* in key!')
    cache = get_cache()
    data = cache.get(key)
    if data is None:
        return resultutils.results(
            result='Get cache fail, key not exist or expired',
            resultcode=manager_common.RESULT_ERROR)
    # Empty values are returned as-is; non-empty values are deserialized.
    if data:
        data = jsonutils.loads_as_bytes(data)
    # BUG FIX: this is the show/get handler -- the success message used to
    # read 'Delete cache success' (copy/paste from the delete handler).
    return resultutils.results(result='Get cache success', data=[data, ])
def expire(self, req, token_id, expire, checker=None):
    """Extend a token's lifetime by *expire* seconds.

    Returns a (token_id, token) pair; for fernet tokens the id changes
    because the expiry lives inside the packed payload.
    """
    if self._is_fernet(req):
        token = self.fernet_formatter.unpack(token_id)
        if checker:
            checker(token)
        # Bump the embedded expiry and repack into a fresh token id.
        token.update({'expire': token.get('expire') + expire})
        token_id = self.fernet_formatter.pack(token)
        return token_id, token
    if not token_id.startswith(self.AUTH_PREFIX):
        raise InvalidArgument('Token id prefix error')
    cache_store = api.get_cache()
    token = cache_store.get(token_id)
    if not token:
        raise exceptions.TokenExpiredError('Token not exist now')
    token = jsonutils.loads_as_bytes(token)
    if checker:
        checker(token)
    # Cache-backed tokens keep their id; just push the TTL out.
    cache_store.expire(token_id, expire)
    return token_id, token
def deserialize_remote_exception(data, allowed_remote_exmods):
    """Rebuild an exception instance from its serialized remote form.

    Falls back to exceptions.RemoteError whenever the original class
    cannot (or must not) be reconstructed. The rebuilt exception's string
    form includes the remote traceback.
    """
    failure = jsonutils.loads_as_bytes(six.text_type(data))
    trace = failure.get('tb', [])
    message = failure.get('message', "") + "\n" + "\n".join(trace)
    name = failure.get('class')
    module = failure.get('module')
    # NOTE(ameade): We DO NOT want to allow just any module to be imported, in
    # order to prevent arbitrary code execution.
    if module != _EXCEPTIONS_MODULE and module not in allowed_remote_exmods:
        return exceptions.RemoteError(name, failure.get('message'), trace)
    try:
        __import__(module)
        mod = sys.modules[module]
        klass = getattr(mod, name)
        if not issubclass(klass, Exception):
            raise TypeError("Can only deserialize Exceptions")
        failure = klass(*failure.get('args', []), **failure.get('kwargs', {}))
    except (AttributeError, TypeError, ImportError):
        # Reconstruction failed: degrade to a generic RemoteError wrapper.
        return exceptions.RemoteError(name, failure.get('message'), trace)
    ex_type = type(failure)
    str_override = lambda self: message
    # Synthesize a "<Name>_Remote" subclass whose str() carries the
    # remote traceback text.
    new_ex_type = type(ex_type.__name__ + _REMOTE_POSTFIX, (ex_type, ), {
        '__str__': str_override,
        '__unicode__': str_override
    })
    new_ex_type.__module__ = '%s%s' % (module, _REMOTE_POSTFIX)
    try:
        # NOTE(ameade): Dynamically create a new exception type and swap it in
        # as the new type for the exception. This only works on user defined
        # Exceptions and not core Python exceptions. This is important because
        # we cannot necessarily change an exception message so we must override
        # the __str__ method.
        failure.__class__ = new_ex_type
    except TypeError:
        # NOTE(ameade): If a core exception then just add the traceback to the
        # first exception argument.
        failure.args = (message, ) + failure.args[1:]
    return failure
def payment(self, money, oid, req):
    """Create an iPay order and return its gateway handles.

    Returns (transid, pay_url, return_url, helper_url). Raises
    CreateOrderError / VerifyOrderError on gateway or signature failure.
    """
    # Convert to gateway currency using the configured exchange rate.
    money = round(money * self.roe, 2)
    # OrderedDict: the sign is computed over the serialized field order.
    data = OrderedDict()
    data['appid'] = self.appid
    data['waresid'] = self.waresid
    # data['waresname'] = 'comic'
    data['cporderid'] = str(oid)
    data['price'] = money
    data['currency'] = self._currency
    data['appuserid'] = self.appuid
    data['notifyurl'] = req.path_url + '/%d' % oid
    transdata = jsonutils.dumps_as_bytes(data)
    sign = self.mksign(transdata, self.signtype)
    LOG.debug('transdata is %s' % transdata)
    params = OrderedDict(transdata=transdata)
    params['sign'] = sign
    params['signtype'] = self.signtype
    resp = self.session.post(self.ORDERURL, data=urlencode(params), timeout=10)
    LOG.debug('response text %s' % str(resp.text))
    # Response carries its own transdata blob plus a signature over it.
    results = IPayApi.decode(resp.text, self.TRANSDATA)
    transdata = jsonutils.loads_as_bytes(results.get(self.TRANSDATA))
    # Any non-empty 'code' means the gateway rejected the order.
    if transdata.get('code'):
        LOG.error('ipay create payment fail %s, code %s'
                  % (transdata.get('errmsg'), str(transdata.get('code'))))
        raise exceptions.CreateOrderError('Create ipay payment result is fail')
    LOG.debug('Create new payment success')
    transid = transdata.get('transid')
    sign = results.get('sign')
    signtype = results.get('signtype')
    # Verify the gateway's signature before trusting the transid.
    if not self.verify(results.get(self.TRANSDATA), sign, signtype):
        raise exceptions.VerifyOrderError('RSA verify payment result sign error')
    return str(transid), self.ipay_url(transid), self.url_r or '', self.url_h or ''
def agent_metadata_flush(self, agent_id, metadata, expire):
    """Publish or refresh an agent's metadata in the shared store.

    Uses WATCH on the host-online key to detect concurrent writers.
    Raises InvalidArgument when another host already owns the key or a
    concurrent change is detected.
    """
    LOG.debug('try update agent metadata in global data')
    host = metadata.get('host')
    host_online_key = self.host_online_key(agent_id)
    pipe = self.client.pipeline()
    pipe.watch(host_online_key)
    pipe.multi()
    pipe.get(host_online_key)
    pipe.ttl(host_online_key)
    # Optimistically extend the TTL; reverted below on host mismatch.
    pipe.expire(host_online_key, expire or manager_common.ONLINE_EXIST_TIME)
    try:
        exist_agent_metadata, ttl, expire_result = pipe.execute()
    except WatchError:
        raise InvalidArgument('Host changed')
    if exist_agent_metadata is not None:
        exist_agent_metadata = jsonutils.loads_as_bytes(exist_agent_metadata)
        if exist_agent_metadata.get('host') != host:
            # Key is held by a different host: restore its previous TTL
            # (only when it was not about to expire anyway) and refuse.
            LOG.error('Host call online with %s, but %s alreday exist with same key'
                      % (host, exist_agent_metadata.get('host')))
            if ttl > 3:
                if not self.client.expire(host_online_key, ttl):
                    LOG.error('Revet ttl of %s fail' % host_online_key)
            raise InvalidArgument('Agent %d with host %s alreday eixst'
                                  % (agent_id, exist_agent_metadata.get('host')))
        else:
            # replace metadata
            if exist_agent_metadata != metadata:
                LOG.info('Agent %d metadata change' % agent_id)
                if not self.client.set(host_online_key,
                                       jsonutils.dumps_as_bytes(metadata),
                                       ex=expire or manager_common.ONLINE_EXIST_TIME):
                    raise InvalidArgument('Another agent login with same host or '
                                          'someone set key %s' % host_online_key)
                self.client.zadd(self.ALL_AGENTS_KEY, int(time.time()), str(agent_id))
    else:
        # First write: nx=True so a racing writer wins and we fail loudly.
        if not self.client.set(host_online_key,
                               jsonutils.dumps_as_bytes(metadata),
                               ex=expire or manager_common.ONLINE_EXIST_TIME,
                               nx=True):
            raise InvalidArgument('Another agent login with same host or '
                                  'someone set key %s' % host_online_key)
        self.client.zadd(self.ALL_AGENTS_KEY, int(time.time()), str(agent_id))
def payment(self, money, cancel):
    """Create a PayPal payment and return the gateway payment id."""
    # Convert to the gateway currency using the configured exchange rate.
    total = '%.2f' % (money * self.roe)
    url = self.api + '/v1/payments/payment'
    payload = dict(intent='sale',
                   payer={'payment_method': 'paypal'},
                   transactions=[
                       dict(amount=dict(total=total, currency=self.currency))
                   ],
                   redirect_urls={"return_url": "http://www.163.com",
                                  "cancel_url": cancel})
    resp = self.session.post(url, auth=self.auth, json=payload,
                             headers={"Content-Type": "application/json"},
                             timeout=10)
    payment = jsonutils.loads_as_bytes(resp.text)
    if payment.get('state') != 'created':
        raise exceptions.CreateOrderError('Create Paypal payment error')
    return payment['id']
def esure_notify(self, data, order):
    """Validate a WeiXin payment notification against *order*.

    Returns (transaction_id, {'prepay_id': ...}) on success; raises
    EsureOrderError for any consistency failure.
    """
    if LOG.isEnabledFor(logging.DEBUG):
        LOG.debug(data)
    result = WeiXinApi.decrypt_xml_to_dict(data)
    self._check_sign(result)
    if result.get('return_code') != 'SUCCESS':
        LOG.error('Esure WeiXin order api request fail: %s' % result.get('return_msg'))
        raise exceptions.EsureOrderError('Esure WeiXin order error, request error')
    if result.get('result_code') != 'SUCCESS':
        LOG.error('Esure WeiXin order result fail')
        raise exceptions.EsureOrderError('Esure WeiXin order error, result error')
    if result['out_trade_no'] != str(order.oid):
        LOG.error('Esure WeiXin order error, oid not the same')
        raise exceptions.EsureOrderError('Esure WeiXin order error, oid not the same')
    # WeiXin reports fen (1/100 yuan); mismatch is logged but tolerated.
    money = int(result['total_fee'])
    if money != order.money * 100:
        LOG.warning('Money not the same! order %d, use value from order' % order.oid)
    try:
        prepay_id = jsonutils.loads_as_bytes(order.ext)['prepay_id'] if order.ext else None
    except Exception as e:
        LOG.error('Get prepay_id from order fail, %s' % e.__class__.__name__)
        prepay_id = None
    return result['transaction_id'], {'prepay_id': prepay_id}
def async_request(_request, agents=False, details=False):
    """Serialize an async request row for the asynrequest show route."""
    req_dict = {'request_id': _request.request_id,
                'request_time': _request.request_time,
                'finishtime': _request.finishtime,
                'deadline': _request.deadline,
                'status': _request.status,
                'expire': _request.expire,
                'resultcode': _request.resultcode,
                'result': _request.result,
                'respones': []}
    # ret_dict = results(data=[req_dict, ], result='Get async request data finish')
    if _request.expire:
        req_dict['result'] += ',Data in cache,May miss some respone'
    if not agents:
        return req_dict
    if _request.expire:
        # Agent responses were offloaded to the cache store.
        _cache = get_cache()
        pattern = targetutils.async_request_pattern(_request.request_id)
        keys = _cache.keys(pattern)
        cached = _cache.mget(*keys) if keys else None
        for raw in (cached or []):
            if not raw:
                continue
            try:
                respone = jsonutils.loads_as_bytes(raw)
            except (TypeError, ValueError):
                continue
            if not details:
                respone.pop('details', None)
            req_dict['respones'].append(respone)
    else:
        # Responses live on the request row itself.
        req_dict['respones'].extend(agent(agent_data, details=details)
                                    for agent_data in _request.respones)
    return req_dict
def entitys(objtypes=None, group_ids=None, need_ok=False, packages=False):
    """Collect (chiefs, areas) summaries for entities of *objtypes*.

    Gameserver entities contribute area records (optionally annotated
    with the package names that serve each area); every other objtype
    contributes a chief record with its network endpoints.
    """
    filters = [AppEntity.objtype.in_(objtypes), AppEntity.status > common.DELETED]
    if group_ids:
        filters.append(AppEntity.group_id.in_(argutils.map_to_int(group_ids)))
    session = endpoint_session(readonly=True)
    query = model_query(session, AppEntity, filter=and_(*filters))
    query = query.options(joinedload(AppEntity.areas))
    appentitys = query.all()
    entitys = set()
    for entity in appentitys:
        entitys.add(entity.entity)
    if not entitys:
        return [], []
    # Reverse-map areas to the packages (channels) that serve them.
    if packages and common.GAMESERVER in objtypes:
        pmaps = {}
        pquery = model_query(session, Package)
        pquery = pquery.options(joinedload(Package.areas, innerjoin=False))
        if group_ids:
            pquery = pquery.filter(Package.group_id.in_(argutils.map_to_int(group_ids)))

        def _pmaps():
            # Build {area_id: [package_name, ...]} in the background.
            for package in pquery:
                for parea in package.areas:
                    try:
                        pmaps[parea.area_id].append(package.package_name)
                    except KeyError:
                        pmaps[parea.area_id] = [package.package_name, ]

        th = eventlet.spawn(_pmaps)
    # Agent-side info (ports/metadata) fetched while _pmaps runs.
    emaps = entity_controller.shows(common.NAME, entitys, ports=True, metadata=True)
    if packages and common.GAMESERVER in objtypes:
        th.wait()
    chiefs = []
    areas = []
    for entity in appentitys:
        if need_ok and entity.status != common.OK:
            continue
        entityinfo = emaps.get(entity.entity)
        ports = entityinfo.get('ports')
        metadata = entityinfo.get('metadata')
        if not metadata:
            raise ValueError('Can not get agent metadata for %d' % entity.entity)
        if entity.objtype == common.GAMESERVER:
            for area in entity.areas:
                info = dict(area_id=area.area_id,
                            show_id=area.show_id,
                            areaname=area.areaname,
                            entity=entity.entity,
                            group_id=entity.group_id,
                            opentime=entity.opentime,
                            platform=entity.platform,
                            status=entity.status,
                            versions=jsonutils.loads_as_bytes(entity.versions) if entity.versions else None,
                            external_ips=metadata.get('external_ips'),
                            dnsnames=metadata.get('dnsnames'),
                            port=ports[0])
                if packages:
                    info.setdefault('packagenames', pmaps.get(area.area_id, []))
                areas.append(info)
        else:
            chiefs.append(dict(entity=entity.entity,
                               objtype=entity.objtype,
                               group_id=entity.group_id,
                               ports=ports,
                               local_ip=metadata.get('local_ip'),
                               dnsnames=metadata.get('dnsnames'),
                               external_ips=metadata.get('external_ips')))
    return chiefs, areas
def deserialize_msg(msg):
    """Return *msg* as a dict, decoding it first when necessary."""
    # Already-deserialized messages pass straight through (see #2 above).
    return msg if isinstance(msg, dict) else jsonutils.loads_as_bytes(msg)
def swallow(self, req, entity, body=None):
    """Internal merge API, normally called by an agent: the new entity
    swallows an old entity's areas and databases.

    Idempotent: if the target is already SWALLOWING, the previously
    saved areas/databases are returned instead of re-running the RPC.
    """
    body = body or {}
    entity = int(entity)
    uuid = body.get('uuid')
    if not uuid:
        raise InvalidArgument('Merger uuid is None')
    session = endpoint_session()
    query = model_query(session, MergeTask, filter=MergeTask.uuid == uuid)
    query = query.options(joinedload(MergeTask.entitys, innerjoin=False))
    glock = get_gamelock()
    rpc = get_client()
    with session.begin():
        etask = query.one_or_none()
        if not etask:
            raise InvalidArgument('Not task exit with %s' % uuid)
        # The caller's new entity must match the task's target entity.
        if etask.entity != body.get('entity'):
            raise InvalidArgument('New entity not %d' % etask.entity)
        # Locate the old entity inside the merge task.
        appentity = None
        for _entity in etask.entitys:
            if _entity.entity == entity:
                if _entity.status != common.MERGEING:
                    if _entity.status != common.SWALLOWING:
                        raise InvalidArgument('Swallow entity find status error')
                    if not _entity.databases or not _entity.areas:
                        raise InvalidArgument('Entity is swallowing but database or ares is None')
                    # Already swallowing: replay the stored snapshot.
                    LOG.warning('Entit is swallowing, return saved data')
                    return resultutils.results(
                        result='swallow entity is success',
                        data=[dict(databases=jsonutils.loads_as_bytes(_entity.databases),
                                   areas=jsonutils.loads_as_bytes(_entity.areas))])
                _query = model_query(session, AppEntity, filter=AppEntity.entity == entity)
                _query = _query.options(joinedload(AppEntity.databases, innerjoin=False))
                appentity = _query.one_or_none()
                break
        if not appentity:
            raise InvalidArgument('Can not find app entity?')
        if appentity.objtype != common.GAMESERVER:
            raise InvalidArgument('objtype error, entity not %s' % common.GAMESERVER)
        if appentity.status != common.MERGEING:
            raise InvalidArgument('find status error, when swallowing')
        databases = self._database_to_dict(appentity)
        areas = [area.to_dict() for area in appentity.areas]
        if not databases or not areas:
            LOG.error('Entity no areas or databases record')
            return resultutils.results(result='swallow entity fail, '
                                              'target entity can not found database or areas',
                                       resultcode=manager_common.RESULT_ERROR)
        with glock.grouplock(group=appentity.group_id):
            # Send the swallow command to the target entity's agent.
            metadata, ports = self._entityinfo(req=req, entity=entity)
            target = targetutils.target_agent_by_string(metadata.get('agent_type'),
                                                        metadata.get('host'))
            target.namespace = common.NAME
            rpc_ret = rpc.call(target,
                               ctxt={'agents': [appentity.agent_id, ]},
                               msg={'method': 'swallow_entity',
                                    'args': dict(entity=entity)})
            if not rpc_ret:
                raise RpcResultError('swallow entity result is None')
            if rpc_ret.get('resultcode') != manager_common.RESULT_SUCCESS:
                raise RpcResultError('swallow entity fail %s' % rpc_ret.get('result'))
            # Mark both records SWALLOWING and snapshot areas/databases
            # on the merge-task row for idempotent replay.
            appentity.status = common.SWALLOWING
            _entity.status = common.SWALLOWING
            _entity.areas = jsonutils.dumps(areas)
            _entity.databases = jsonutils.dumps(databases)
            session.flush()
    return resultutils.results(result='swallow entity is success',
                               data=[dict(databases=databases, areas=areas)])
def swallowed(self, req, entity, body=None):
    """Internal merge API, normally called by an agent, once the new
    entity has finished swallowing the old one.

    Marks the old AppEntity DELETED, rebinds its areas to the new
    entity, and asynchronously releases its database quotes.
    """
    body = body or {}
    entity = int(entity)
    uuid = body.get('uuid')
    if not uuid:
        raise InvalidArgument('Merger uuid is None')
    session = endpoint_session()
    query = model_query(session, MergeTask, filter=MergeTask.uuid == uuid)
    query = query.options(joinedload(MergeTask.entitys, innerjoin=False))
    glock = get_gamelock()
    rpc = get_client()
    appentity = None
    with session.begin():
        etask = query.one_or_none()
        if not etask:
            raise InvalidArgument('Not task exit with %s' % uuid)
        # The caller's new entity must match the task's target entity.
        if etask.entity != body.get('entity'):
            raise InvalidArgument('New entity not %d' % etask.entity)
        for _entity in etask.entitys:
            if _entity.entity == entity:
                if _entity.status != common.SWALLOWING:
                    raise InvalidArgument('Swallowed entity find status error')
                _query = model_query(session, AppEntity, filter=AppEntity.entity == entity)
                _query = _query.options(joinedload(AppEntity.databases, innerjoin=False))
                appentity = _query.one_or_none()
                break
        if not appentity:
            raise InvalidArgument('Can not find app entity?')
        if appentity.objtype != common.GAMESERVER:
            raise InvalidArgument('objtype error, entity not %s' % common.GAMESERVER)
        if appentity.status != common.SWALLOWING:
            raise InvalidArgument('find status error, when swallowed')
        with glock.grouplock(group=appentity.group_id):
            # Tell the old entity's agent the swallow has completed.
            metadata, ports = self._entityinfo(req=req, entity=entity)
            target = targetutils.target_agent_by_string(metadata.get('agent_type'),
                                                        metadata.get('host'))
            target.namespace = common.NAME
            rpc_ret = rpc.call(target,
                               ctxt={'agents': [appentity.agent_id, ]},
                               msg={'method': 'swallowed_entity',
                                    'args': dict(entity=entity)})
            if not rpc_ret:
                raise RpcResultError('swallowed entity result is None')
            if rpc_ret.get('resultcode') != manager_common.RESULT_SUCCESS:
                raise RpcResultError('swallowed entity fail %s' % rpc_ret.get('result'))
            # Old AppEntity becomes DELETED; task record becomes MERGEED.
            appentity.status = common.DELETED
            _entity.status = common.MERGEED
            session.flush()
            # Rebind the old entity's areas onto the new entity.
            _query = model_query(session, GameArea, filter=GameArea.entity == entity)
            _query.update({'entity': etask.entity})
            session.flush()

    def _unquote():
        # Best effort: release the old entity's database quotes.
        LOG.info('Swallowed %d finish, try unquote database' % appentity.entity)
        for database in appentity.databases:
            try:
                schema_controller.unquote(req, quote_id=database.quote_id)
            except Exception:
                LOG.error('Delete database quote fail')

    eventlet.spawn_n(_unquote)
    return resultutils.results(result='swallowed entity is success',
                               data=[dict(databases=jsonutils.loads_as_bytes(_entity.databases),
                                          areas=jsonutils.loads_as_bytes(_entity.areas))])
def safe_load(var):
    """Decode *var* with jsonutils, passing None through untouched."""
    return None if var is None else jsonutils.loads_as_bytes(var)
def index(self, req, group_id, objtype, body=None):
    """Paginated listing of a group's entities of *objtype*.

    Optional body flags: order/desc (sorting), detail (attach per-entity
    database info, gathered in a background greenthread), packages
    (attach package ids per area, gameservers only), page_num.
    """
    body = body or {}
    group_id = int(group_id)
    order = body.pop('order', None)
    desc = body.pop('desc', False)
    detail = body.pop('detail', False)
    packages = body.pop('packages', False)
    page_num = int(body.pop('page_num', 0))
    session = endpoint_session(readonly=True)
    columns = [AppEntity.entity,
               AppEntity.group_id,
               AppEntity.agent_id,
               AppEntity.opentime,
               AppEntity.platform,
               AppEntity.versions,
               AppEntity.status,
               AppEntity.objtype]
    joins = None
    if objtype == common.GAMESERVER:
        # Gameservers additionally expose their areas (and area packages).
        columns.append(AppEntity.areas)
        joins = joinedload(AppEntity.areas, innerjoin=False)
        if packages:
            joins = joins.joinedload(GameArea.packages, innerjoin=False)

    def _databases():
        # Build {entity: [dbinfo, ...]} for the detail view.
        _maps = {}
        if objtype != common.GAMESERVER:
            return _maps
        query = model_query(session, AreaDatabase)
        for _db in query:
            dbinfo = dict(quote_id=_db.quote_id,
                          subtype=_db.subtype,
                          host=_db.host, port=_db.port, )
            try:
                _maps[_db.entity].append(dbinfo)
            except KeyError:
                _maps[_db.entity] = [dbinfo, ]
        return _maps

    if detail:
        dth = eventlet.spawn(_databases)
    results = resultutils.bulk_results(session,
                                       model=AppEntity,
                                       columns=columns,
                                       counter=AppEntity.entity,
                                       order=order, desc=desc,
                                       option=joins,
                                       filter=and_(AppEntity.group_id == group_id,
                                                   AppEntity.objtype == objtype),
                                       page_num=page_num)
    if detail:
        dbmaps = dth.wait()
    if not results['data']:
        return results
    # Cross-reference each row with the live agent info.
    emaps = entity_controller.shows(endpoint=common.NAME,
                                    entitys=[column.get('entity')
                                             for column in results['data']])
    for column in results['data']:
        entity = column.get('entity')
        entityinfo = emaps.get(entity)
        if detail:
            try:
                column['databases'] = dbmaps[entity]
            except KeyError:
                LOG.error('Entity %d lose database' % entity)
        # DB row and agent view must agree on the owning agent.
        if column['agent_id'] != entityinfo.get('agent_id'):
            raise RuntimeError('Entity agent id %d not the same as %d'
                               % (column['agent_id'], entityinfo.get('agent_id')))
        areas = column.pop('areas', [])
        if objtype == common.GAMESERVER:
            _areas = []
            for area in areas:
                _area = dict(area_id=area.area_id,
                             show_id=area.show_id,
                             gid=0,
                             areaname=area.areaname)
                if packages:
                    _area.setdefault('packages',
                                     [parea.package_id for parea in area.packages])
                _areas.append(_area)
            areas = _areas
        column['areas'] = areas
        column['ports'] = entityinfo.get('ports')
        metadata = entityinfo.get('metadata')
        # metadata is absent when the agent is offline.
        if metadata:
            local_ip = metadata.get('local_ip')
            external_ips = metadata.get('external_ips')
        else:
            local_ip = external_ips = None
        column['local_ip'] = local_ip
        column['external_ips'] = external_ips
        versions = column.get('versions')
        if versions:
            column['versions'] = jsonutils.loads_as_bytes(versions)
    return results
def delete(self, req, group_id, objtype, entity, body=None):
    """Mark an entity deleted.

    Verifies preconditions per objtype (GM servers must be last in the
    group, cross servers must be unreferenced), confirms via RPC that
    the entity process is stopped, then flags the row DELETED and cleans
    up gameserver version quotes / areas.
    """
    body = body or {}
    force = body.get('force', False)
    group_id = int(group_id)
    entity = int(entity)
    session = endpoint_session()
    glock = get_gamelock()
    metadata, ports = self._entityinfo(req=req, entity=entity)
    if not metadata:
        raise InvalidArgument('Agent offline, can not delete entity')
    query = model_query(session, AppEntity, filter=AppEntity.entity == entity)
    if objtype == common.GAMESERVER:
        query = query.options(joinedload(AppEntity.areas, innerjoin=False))
    _entity = query.one()
    # Idempotent: an already-deleted entity returns success immediately.
    if _entity.status == common.DELETED:
        return resultutils.results(result='mark %s entity delete success' % objtype,
                                   data=[dict(entity=entity,
                                              objtype=objtype,
                                              ports=ports,
                                              metadata=metadata)])
    if _entity.objtype != objtype:
        raise InvalidArgument('Objtype not match')
    if _entity.group_id != group_id:
        raise InvalidArgument('Group id not match')
    target = targetutils.target_agent_by_string(metadata.get('agent_type'),
                                                metadata.get('host'))
    target.namespace = common.NAME
    rpc = get_client()
    with glock.grouplock(group=group_id):
        if objtype == common.GMSERVER:
            # The GM server must be the last entity/package in its group.
            if model_count_with_key(session, AppEntity,
                                    filter=AppEntity.group_id == group_id) > 1:
                raise InvalidArgument('You must delete other objtype entity before delete gm')
            if model_count_with_key(session, Package,
                                    filter=Package.group_id == group_id) > 1:
                raise InvalidArgument('You must delete other Package before delete gm')
        elif objtype == common.CROSSSERVER:
            # Refuse while any entity still points at this cross server.
            if model_count_with_key(session, AppEntity,
                                    filter=AppEntity.cross_id == _entity.entity):
                raise InvalidArgument('Cross server are reflected')
        with session.begin():
            # Confirm with the agent that the entity process is stopped.
            rpc_ret = rpc.call(target,
                               ctxt={'agents': [_entity.agent_id, ]},
                               msg={'method': 'stoped',
                                    'args': dict(entity=entity)})
            if not rpc_ret:
                raise RpcResultError('check entity stoped result is None')
            if rpc_ret.get('resultcode') != manager_common.RESULT_SUCCESS:
                raise RpcResultError('check entity fail %s' % rpc_ret.get('result'))
            _entity.status = common.DELETED
            session.flush()
            if objtype == common.GAMESERVER:
                # Drop every resource-version quote this entity holds.
                if _entity.versions:
                    for quote in six.itervalues(jsonutils.loads_as_bytes(_entity.versions)):
                        threadpool.add_thread(cdnquote_controller.delete,
                                              req, quote.get('quote_id'))
                    _entity.versions = None
                    session.flush()
                if _entity.areas:
                    if len(_entity.areas) > 1:
                        raise InvalidArgument('%s areas more then one' % objtype)
                    area = _entity.areas[0]
                    # Unless forced, only the newest gamesvr may be deleted.
                    if not force:
                        if _entity.entity != model_max_with_key(
                                session, AppEntity.entity,
                                filter=and_(AppEntity.objtype == common.GAMESERVER,
                                            AppEntity.group_id == group_id)):
                            raise InvalidArgument('entity %d is not the last gamesvr entity in group'
                                                  % entity)
                    session.flush()
                    session.delete(area)
                    session.flush()
                    # Remove package-area bindings for the deleted area.
                    _query = model_query(session, PackageArea,
                                         filter=PackageArea.area_id == area.area_id)
                    _query.delete()
                    session.flush()
            # Fire-and-forget status push to the agent.
            rpc.cast(target,
                     ctxt={'agents': [_entity.agent_id, ]},
                     msg={'method': 'change_status',
                          'args': dict(entity=entity, status=common.DELETED)})
    return resultutils.results(result='mark %s entity delete success' % objtype,
                               data=[dict(entity=entity,
                                          objtype=objtype,
                                          ports=ports,
                                          metadata=metadata)])