def bluk_insert(storage, agents, bulk_data, expire=60):
    """Bulk-insert agent respone records into *storage*.

    :param storage: a SQLAlchemy ``Session`` or a ``StrictRedis`` client;
                    any other type raises ``NotImplementedError``.
    :param agents: collection of agent ids; ``agent_id`` is removed from
                   it when its record already exists in storage.
    :param bulk_data: list of respone dicts; ``request_id`` and
                      ``agent_id`` are read from the first element only
                      (assumes the whole batch belongs to one
                      request/agent -- TODO confirm with callers).
    :param expire: TTL in seconds for keys written to redis.
    """
    # NOTE(review): "bluk" is a typo for "bulk"; the name is kept as-is
    # because external callers may reference it.
    if bulk_data:
        request_id = bulk_data[0]['request_id']
        agent_id = bulk_data[0]['agent_id']
    if isinstance(storage, Session):
        # Database path: one transaction for the whole batch; a duplicate
        # row (DBDuplicateEntry) drops the agent from *agents* and moves on.
        with storage.begin():
            for data in bulk_data:
                try:
                    resp = AgentRespone(**data)
                    storage.add(resp)
                    storage.flush()
                except DBDuplicateEntry:
                    agents.remove(agent_id)
                    continue
    elif isinstance(storage, StrictRedis):
        # Redis path: SET with nx=True never overwrites; a failed SET
        # means the key already exists, so the agent is removed.
        for data in bulk_data:
            # NOTE(review): respone_key does not depend on *data*, so it
            # is identical on every iteration; with nx=True only the first
            # element can ever be stored, and each later iteration calls
            # agents.remove() again (which raises on sets/lists once the
            # id is gone) -- confirm this is the intended behavior.
            respone_key = targetutils.async_request_key(
                request_id, agent_id)
            if not storage.set(respone_key, jsonutils.dumps_as_bytes(data),
                               ex=expire, nx=True):
                agents.remove(agent_id)
    else:
        raise NotImplementedError('bluk insert storage type error')
def show(self, req, md5, body=None):
    """Return the stored metadata of the download file matching *md5*.

    Responds with RESULT_ERROR when no row matches, or when the file's
    status is not DOWNFILE_FILEOK; otherwise RESULT_SUCCESS.
    """
    session = get_session(readonly=True)
    query = model_query(session, DownFile, filter=DownFile.md5 == md5)
    record = query.one_or_none()
    if not record:
        return resultutils.results(resultcode=manager_common.RESULT_ERROR,
                                   result='Get file fail, no found')
    file_info = {
        'downloader': record.downloader,
        'address': record.address,
        'ext': record.ext,
        'size': record.size,
        'uploadtime': str(record.uploadtime),
        'md5': record.md5,
        'status': record.status,
    }
    # Optional columns only appear in the payload when set on the row.
    if record.adapter_args:
        file_info['adapter_args'] = jsonutils.dumps_as_bytes(record.adapter_args)
    if record.desc:
        file_info['desc'] = record.desc
    file_ok = record.status == manager_common.DOWNFILE_FILEOK
    resultcode = (manager_common.RESULT_SUCCESS if file_ok
                  else manager_common.RESULT_ERROR)
    return resultutils.results(result='Get file success',
                               resultcode=resultcode,
                               data=[file_info, ])
def create(self, req, body=None):
    """Cache the posted body under a freshly generated key.

    :param req: request object (not used by this handler).
    :param body: optional dict; ``body['expire']`` (default 30) is the
                 cache TTL in seconds.
    :returns: a ``resultutils.results`` payload whose data holds the key.
    :raises CacheStoneError: when the key could not be stored (collision).
    """
    # BUGFIX: *body* defaults to None, so body.get() used to raise
    # AttributeError whenever no request body was posted.  Normalize
    # it to an empty dict first.
    body = body or {}
    expire = int(body.get('expire') or 30)
    cache = get_cache()
    # Key shape: <PREFIX>-caches-<unix timestamp>-<6 random lowercase letters>.
    salt = ''.join(random.sample(string.lowercase, 6))
    key = '-'.join([self.PREFIX, 'caches', str(int(time.time())), salt])
    # nx=True: never clobber an existing key; an empty body is stored as ''.
    if not cache.set(key, jsonutils.dumps_as_bytes(body) if body else '',
                     ex=expire or manager_common.ONLINE_EXIST_TIME,
                     nx=True):
        raise CacheStoneError('Cache key value error')
    return resultutils.results(result='Make cache success', data=[key])
def agent_metadata_flush(self, agent_id, metadata, expire):
    """Refresh an agent's online metadata in redis.

    Reads the agent's online key under WATCH, extends its TTL, and then
    either rejects the login (same key held by a different host), updates
    the stored metadata (same host), or creates the key (agent was
    offline).  Raises ``InvalidArgument`` on any conflict.
    """
    LOG.debug('try update agent metadata in global data')
    host = metadata.get('host')
    host_online_key = self.host_online_key(agent_id)
    # WATCH the key so pipe.execute() fails (WatchError) if anyone else
    # touches it between here and EXEC.
    pipe = self.client.pipeline()
    pipe.watch(host_online_key)
    pipe.multi()
    pipe.get(host_online_key)
    pipe.ttl(host_online_key)
    # EXPIRE is queued optimistically; it may be reverted below when the
    # key turns out to belong to another host.
    pipe.expire(host_online_key, expire or manager_common.ONLINE_EXIST_TIME)
    try:
        exist_agent_metadata, ttl, expire_result = pipe.execute()
    except WatchError:
        raise InvalidArgument('Host changed')
    if exist_agent_metadata is not None:
        # Key already exists: agent (or an impostor) is currently online.
        exist_agent_metadata = jsonutils.loads_as_bytes(exist_agent_metadata)
        if exist_agent_metadata.get('host') != host:
            LOG.error('Host call online with %s, but %s alreday exist with same key'
                      % (host, exist_agent_metadata.get('host')))
            if ttl > 3:
                # Roll the TTL back to its pre-flush value since the
                # optimistic EXPIRE above should not benefit the intruder.
                if not self.client.expire(host_online_key, ttl):
                    LOG.error('Revet ttl of %s fail' % host_online_key)
            raise InvalidArgument('Agent %d with host %s alreday eixst'
                                  % (agent_id, exist_agent_metadata.get('host')))
        else:
            # replace metadata
            if exist_agent_metadata != metadata:
                LOG.info('Agent %d metadata change' % agent_id)
                # Plain SET (no nx): overwrite the stale metadata.
                if not self.client.set(host_online_key,
                                       jsonutils.dumps_as_bytes(metadata),
                                       ex=expire or manager_common.ONLINE_EXIST_TIME):
                    raise InvalidArgument('Another agent login with same host or '
                                          'someone set key %s' % host_online_key)
            # Refresh this agent's score in the all-agents sorted set.
            self.client.zadd(self.ALL_AGENTS_KEY, int(time.time()), str(agent_id))
    else:
        # Key absent: first login -- create it, nx=True guards against a
        # concurrent creator winning the race.
        if not self.client.set(host_online_key,
                               jsonutils.dumps_as_bytes(metadata),
                               ex=expire or manager_common.ONLINE_EXIST_TIME,
                               nx=True):
            raise InvalidArgument('Another agent login with same host or '
                                  'someone set key %s' % host_online_key)
        self.client.zadd(self.ALL_AGENTS_KEY, int(time.time()), str(agent_id))
def serialize(self, data):
    """Serialize a dictionary into JSON.

    ``None`` passes through unchanged.  A dictionary with a single key
    can be passed and it can contain any structure; any other type is
    rejected with an ``Exception``.
    """
    if data is None:
        return None
    if not isinstance(data, dict):
        raise Exception("Unable to serialize object of type = '%s'"
                        % type(data))
    return jsonutils.dumps_as_bytes(data)
def create(self, req, token, expire):
    """Issue a token id for *token* and expose the token in the request environ.

    Fernet requests get a self-describing packed token; otherwise the
    token is stored in the cache under a random id with TTL *expire*.
    """
    if self._is_fernet(req):
        # Fernet tokens carry their own absolute expiry in the payload.
        token['expire'] = expire + int(time.time())
        token_id = self.fernet_formatter.pack(token)
    else:
        # Cache-backed token: AUTH_PREFIX + dash-free uuid.
        store = api.get_cache()
        raw_uuid = str(uuidutils.generate_uuid()).replace('-', '')
        token_id = '-'.join([self.AUTH_PREFIX, raw_uuid])
        stored = store.set(token_id, jsonutils.dumps_as_bytes(token),
                           ex=expire, nx=True)
        if not stored:
            LOG.error('Cache token fail')
            raise exceptions.CacheStoneError('Set to cache store fail')
    req.environ[manager_common.TOKENNAME] = token
    return token_id
def ipay_url(self, transid):
    """Build the iPay H5 gateway URL for transaction *transid*.

    Returns '' when H5 mode is disabled; raises ``ValueError`` when the
    success/fail redirect URLs are missing.
    """
    if not self.h5:
        return ''
    if not self.url_h or not self.url_r:
        raise ValueError('Ipay with h5 need success url and fail url')
    # Field order matters to the gateway's signature check.
    payload = OrderedDict([
        ('tid', transid),
        ('app', self.appid),
        ('url_r', self.url_r),
        ('url_h', self.url_h),
    ])
    serialized = jsonutils.dumps_as_bytes(payload)
    query = dict(data=serialized,
                 sign=self.mksign(serialized, self.signtype),
                 sign_type=self.signtype)
    return IPayApi.GWURL + '?' + urlencode(query)
def payment(self, money, oid, req):
    """Create an iPay payment order for local order *oid*.

    Posts a signed order request to the iPay gateway, verifies the signed
    response, and returns ``(transid, h5_url, success_url, fail_url)``.

    :raises exceptions.CreateOrderError: gateway returned an error code.
    :raises exceptions.VerifyOrderError: response signature check failed.
    """
    # self.roe presumably converts to the gateway currency -- TODO confirm.
    money = round(money * self.roe, 2)
    # Field order matters for the gateway's signature check.
    data = OrderedDict()
    data['appid'] = self.appid
    data['waresid'] = self.waresid
    # data['waresname'] = 'comic'
    data['cporderid'] = str(oid)
    data['price'] = money
    data['currency'] = self._currency
    data['appuserid'] = self.appuid
    # Gateway calls back on <request path>/<oid>.
    data['notifyurl'] = req.path_url + '/%d' % oid
    transdata = jsonutils.dumps_as_bytes(data)
    sign = self.mksign(transdata, self.signtype)
    LOG.debug('transdata is %s' % transdata)
    params = OrderedDict(transdata=transdata)
    params['sign'] = sign
    params['signtype'] = self.signtype
    resp = self.session.post(self.ORDERURL, data=urlencode(params),
                             timeout=10)
    LOG.debug('response text %s' % str(resp.text))
    # Decode the gateway envelope, then parse the inner JSON transdata.
    results = IPayApi.decode(resp.text, self.TRANSDATA)
    transdata = jsonutils.loads_as_bytes(results.get(self.TRANSDATA))
    # A non-zero/ non-empty 'code' signals failure from the gateway.
    if transdata.get('code'):
        LOG.error('ipay create payment fail %s, code %s'
                  % (transdata.get('errmsg'), str(transdata.get('code'))))
        raise exceptions.CreateOrderError(
            'Create ipay payment result is fail')
    LOG.debug('Create new payment success')
    transid = transdata.get('transid')
    sign = results.get('sign')
    signtype = results.get('signtype')
    # Verify the gateway's signature over the raw transdata string.
    if not self.verify(results.get(self.TRANSDATA), sign, signtype):
        raise exceptions.VerifyOrderError(
            'RSA verify payment result sign error')
    return str(transid), self.ipay_url(
        transid), self.url_r or '', self.url_h or ''
def edit(self, req, agent_id, body=None):
    """call by agent"""
    # TODO check data in body
    updates = body or {}
    # ports_range is stored serialized; pop it out and re-add as JSON.
    ports_range = updates.pop('ports_range', [])
    if ports_range:
        updates['ports_range'] = jsonutils.dumps_as_bytes(ports_range)
    session = get_session()
    glock = get_global().lock('agents')
    # Hold the global agents lock while mutating this agent's row.
    with glock([agent_id, ]):
        if not updates:
            raise InvalidInput('Not data exist')
        with session.begin():
            query = model_query(session, Agent,
                                Agent.agent_id == agent_id)
            query.update(updates)
    return resultutils.results(pagenum=0,
                               result='Update agent success',
                               data=[updates, ])
def _set_to_mysql(self, value):
    """Serialize a set-like *value* into a JSON array for MySQL storage."""
    items = list(value)
    return jsonutils.dumps_as_bytes(items)
def _dict_to_mysql(self, value):
    """Serialize a dict *value* into JSON for MySQL storage."""
    return jsonutils.dumps_as_bytes(value)
def do_GET(self):
    """Handle a GET: websocket upgrade, or a JSON directory listing.

    Rejects path traversal and the root path, checks the request token,
    then hands off to the websocket handler; plain GETs on directories
    return the entries as a JSON array.
    """
    # Forbid escaping upward via relative-path ('..') components.
    if '..' in self.path:
        raise ValueError('Path value is illegal')
    path = self.translate_path(self.path)
    # Refuse the bare root directory.
    if path == '/':
        raise ValueError('Home value error')
    # Validate the access token carried in the URL/headers.
    try:
        if fetch_token(self.path, self.headers) != CONF.token:
            self.logger.error('Token not match')
            self.send_error(401, "Token not match")
            return None
    except exceptions.WebSocketError as e:
        self.send_error(405, e.message)
        return None
    if not self.handle_websocket():
        # Plain (non-websocket) HTTP GET handling.
        if self.only_upgrade:
            self.send_error(405, "Method Not Allowed")
        else:
            # If the target is a directory, allow listing its contents.
            if os.path.isdir(path):
                self.logger.info('handle websocket finish target is path')
                # Split the query string off before normalizing the path.
                _path = self.path.split('?', 1)[0]
                parameters = self.path[len(_path):]
                _path = _path.split('#', 1)[0]
                if not _path.endswith('/'):
                    # redirect browser - doing basically what apache does
                    _path = _path + "/" + parameters
                    self.send_response(301)
                    self.send_header("Location", _path)
                    self.end_headers()
                    return None
                try:
                    filelist = os.listdir(path)
                except os.error:
                    self.send_error(404, "No permission to list directory")
                    return None
                _filelist = []
                filelist.sort(key=lambda a: a.lower())
                # NOTE(review): f is created but never written to; the
                # final `return f.close()` just returns None -- this
                # StringIO appears vestigial.
                f = StringIO()
                for name in filelist:
                    fullname = os.path.join(path, name)
                    displayname = name
                    # Mark directories with '/' and symlinks with '@'.
                    if os.path.isdir(fullname):
                        displayname = name + "/"
                    if os.path.islink(fullname):
                        displayname = name + "@"
                    _filelist.append(cgi.escape(displayname))
                # Render the directory listing as JSON.
                buf = jsonutils.dumps_as_bytes(_filelist)
                self.send_response(200)
                self.send_header(
                    "Content-type",
                    "application/json; charset=%s" % systemutils.SYSENCODE)
                self.send_header("Content-Length", len(buf))
                self.end_headers()
                self.wfile.write(buf)
                return f.close()
            else:
                self.send_error(405, "Method Not Allowed")
def agentrespone(storage, request_id, data):
    """agent report respone api

    Validates *data* against RESPONESCHEMA and records the agent's
    respone for *request_id* either in the database (SQLAlchemy
    ``Session``) or in redis (``StrictRedis``).  A duplicate respone from
    the same agent is only accepted as an overwrite when the existing one
    timed out (RESULT_OVER_FINISHTIME); otherwise an error result is
    returned.
    """
    jsonutils.schema_validate(data, RESPONESCHEMA)
    agent_id = data.get('agent_id')
    agent_time = data.get('agent_time')
    resultcode = data.get('resultcode')
    result = data.get('result', 'no result message')
    expire = data.get('expire', 60)
    # Per-detail sub-results reported alongside the main respone.
    details = [
        dict(detail_id=detail['detail_id'],
             resultcode=detail['resultcode'],
             result=detail['result'])
        for detail in data.get('details', [])
    ]
    # Rebind *data* to the normalized record that gets persisted.
    data = dict(
        agent_id=agent_id,
        agent_time=agent_time,
        server_time=int(time.time()),
        resultcode=resultcode,
        result=result,
    )
    if isinstance(storage, Session):
        try:
            # Main row plus detail rows in one transaction.
            with storage.begin():
                data.setdefault('request_id', request_id)
                storage.add(AgentRespone(**data))
                storage.flush()
                for detail in details:
                    detail.update(
                        dict(agent_id=agent_id, request_id=request_id))
                    # Non-string detail results are stored as JSON.
                    detail_result = detail.pop('result')
                    if not isinstance(detail_result, basestring):
                        detail_result = jsonutils.dumps_as_bytes(detail_result)
                    detail.setdefault('result', detail_result)
                    storage.add(ResponeDetail(**detail))
                    storage.flush()
        except DBDuplicateEntry:
            # A respone for this (request, agent) already exists; only
            # replace it if the previous one was a finish-time timeout.
            query = model_query(storage, AgentRespone,
                                filter=and_(
                                    AgentRespone.request_id == request_id,
                                    AgentRespone.agent_id == agent_id))
            with storage.begin(subtransactions=True):
                respone = query.one()
                if respone.resultcode != manager_common.RESULT_OVER_FINISHTIME:
                    result = 'Agent %d respone %s fail,another agent with same agent_id in database' % \
                             (agent_id, request_id)
                    return resultutils.results(
                        result=result,
                        resultcode=manager_common.RESULT_ERROR)
                query.update(data)
    elif isinstance(storage, StrictRedis):
        # Redis keeps the details embedded in the same JSON value.
        data.setdefault('details', details)
        respone_key = targetutils.async_request_key(request_id, agent_id)
        try:
            # nx=True: a failed SET means a respone already exists.
            if not storage.set(respone_key, jsonutils.dumps_as_bytes(data),
                               ex=expire, nx=True):
                respone = jsonutils.loads_as_bytes(storage.get(respone_key))
                if respone.get(
                        'resultcode') != manager_common.RESULT_OVER_FINISHTIME:
                    result = 'Agent %d respone %s fail,another agent ' \
                             'with same agent_id in redis' % \
                             (agent_id, request_id)
                    return resultutils.results(
                        result=result,
                        resultcode=manager_common.RESULT_ERROR)
                # overwrite respone_key (previous respone had timed out)
                storage.set(respone_key, jsonutils.dumps_as_bytes(data),
                            ex=expire, nx=False)
        except RedisError:
            result = 'Agent %d respne %s fail, write to redis fail' % \
                     (agent_id, request_id)
            return resultutils.results(result=result,
                                       resultcode=manager_common.RESULT_ERROR)
    else:
        raise NotImplementedError('respone storage type error')
    return resultutils.results(result='Agent %d Post respone of %s success' %
                                      (agent_id, request_id))