async def request(self, route, *, files=None, form=None, **kwargs):
    """Perform one Discord API request with rate-limit handling.

    This mirrors discord.py's ``HTTPClient.request`` but additionally
    times each request, records it in the ``api_histogram`` metric with
    an ID-stripped path label, and routes logging through ``http_logger``
    with structured ``extra`` data.

    Returns the decoded response body (dict for JSON, str otherwise).
    Raises the discord.errors hierarchy on failure.
    """
    bucket = route.bucket
    method = route.method
    url = route.url

    # One lock per rate-limit bucket so concurrent requests to the same
    # bucket are serialised.
    lock = self._locks.get(bucket)
    if lock is None:
        lock = asyncio.Lock()
        if bucket is not None:
            self._locks[bucket] = lock

    # header creation
    headers = {
        "User-Agent": self.user_agent,
        "X-Ratelimit-Precision": "millisecond",
    }

    if self.token is not None:
        headers[
            "Authorization"] = "Bot " + self.token if self.bot_token else self.token
    # some checking if it's a JSON request
    if "json" in kwargs:
        headers["Content-Type"] = "application/json"
        kwargs["data"] = discord.utils.to_json(kwargs.pop("json"))

    # Optional audit-log reason, forwarded as a header.
    try:
        reason = kwargs.pop("reason")
    except KeyError:
        pass
    else:
        if reason:
            headers["X-Audit-Log-Reason"] = urllib.parse.quote(reason, safe="/ ")

    kwargs["headers"] = headers

    # Proxy support
    if self.proxy is not None:
        kwargs["proxy"] = self.proxy
    if self.proxy_auth is not None:
        kwargs["proxy_auth"] = self.proxy_auth

    if not self._global_over.is_set():
        # wait until the global lock is complete
        await self._global_over.wait()

    await lock.acquire()
    with discord.http.MaybeUnlock(lock) as maybe_lock:
        for tries in range(5):
            # File objects must be rewound before each retry attempt.
            if files:
                for f in files:
                    f.reset(seek=tries)

            if form:
                form_data = aiohttp.FormData()
                for params in form:
                    form_data.add_field(**params)
                kwargs["data"] = form_data

            try:
                timer = Timer()
                # NOTE: name-mangled access to the private session of the
                # wrapped HTTPClient.
                async with self._HTTPClient__session.request(
                        method, url, **kwargs) as r:
                    response_time = timer.stop()
                    # even errors have text involved in them so this is safe to call
                    data = await discord.http.json_or_text(r)
                    sanitized_url = sanitize_url(url)
                    without_ids = remove_ids(sanitized_url)
                    api_histogram.labels(
                        method=method, path=without_ids).observe(response_time)
                    # Skip logging only when the path is on the ignore list
                    # AND the request succeeded.
                    if (IGNORE_PATH not in url) or r.status not in [200, 204]:
                        http_logger.debug(
                            f"{method} {url} returned: {r.status}",
                            extra={
                                "http": {
                                    "method": method,
                                    "path": sanitized_url,
                                    "payload": kwargs.get("data"),
                                    "status": r.status,
                                    "response": data if 300 > r.status >= 200 else None,
                                }
                            },
                        )

                    # check if we have rate limit header information
                    remaining = r.headers.get("X-Ratelimit-Remaining")
                    if remaining == "0" and r.status != 429:
                        # we've depleted our current bucket
                        delta = discord.utils._parse_ratelimit_header(
                            r, use_clock=self.use_clock)
                        http_logger.debug(
                            f"A rate limit bucket has been exhausted (bucket: {bucket}, retry: {delta})."
                        )
                        # Keep the bucket lock held until the rate limit
                        # window expires.
                        maybe_lock.defer()
                        self.loop.call_later(delta, lock.release)

                    # the request was successful so just return the text/json
                    if 300 > r.status >= 200:
                        return data

                    # we are being rate limited
                    if r.status == 429:
                        if not r.headers.get("Via"):
                            # Banned by Cloudflare more than likely.
                            raise discord.errors.HTTPException(r, data)

                        # sleep a bit
                        retry_after = data["retry_after"] / 1000.0
                        http_logger.warning(
                            f"We are being rate limited. Retrying in {retry_after:.2f} "
                            f'seconds. Handled under the bucket "{bucket}"')

                        # check if it's a global rate limit
                        is_global = data.get("global", False)
                        if is_global:
                            http_logger.warning(
                                f"Global rate limit has been hit. "
                                f"Retrying in {retry_after:.2f} seconds.")
                            self._global_over.clear()

                        await asyncio.sleep(retry_after)
                        http_logger.debug(
                            "Done sleeping for the rate limit. Retrying...")

                        # release the global lock now that the
                        # global rate limit has passed
                        if is_global:
                            self._global_over.set()
                            http_logger.debug("Global rate limit is now over.")

                        continue

                    # we've received a 500 or 502, unconditional retry
                    if r.status in {500, 502}:
                        await asyncio.sleep(1 + tries * 2)
                        continue

                    # the usual error cases
                    if r.status == 403:
                        raise discord.errors.Forbidden(r, data)
                    elif r.status == 404:
                        raise discord.errors.NotFound(r, data)
                    elif r.status == 503:
                        raise discord.errors.DiscordServerError(r, data)
                    else:
                        raise discord.errors.HTTPException(r, data)

            # This is handling exceptions from the request
            except OSError as e:
                # Connection reset by peer
                if tries < 4 and e.errno in (54, 10054):
                    continue
                raise

        # We've run out of retries, raise.
        if r.status >= 500:
            raise discord.errors.DiscordServerError(r, data)

        raise discord.errors.HTTPException(r, data)
async def request(self, verb, url, payload=None, multipart=None, *, files=None, reason=None):
    """Send a webhook HTTP request, retrying on rate limits and 5xx errors.

    ``payload`` is JSON-encoded into the body; ``multipart`` is a mapping
    turned into form data (keys starting with ``'file'`` are treated as
    ``(filename, fp, content_type)`` tuples); ``files`` are rewound before
    every retry attempt.

    Returns the decoded response body (dict for JSON, str otherwise).
    Raises Forbidden / NotFound / HTTPException / DiscordServerError.
    """
    headers = {}
    data = None  # the *outgoing* request body
    files = files or []
    if payload:
        headers['Content-Type'] = 'application/json'
        data = utils.to_json(payload)

    if reason:
        headers['X-Audit-Log-Reason'] = _uriquote(reason, safe='/ ')

    base_url = url.replace(self._request_url, '/') or '/'
    _id = self._webhook_id
    for tries in range(5):
        # File objects must be rewound before each retry attempt.
        for file in files:
            file.reset(seek=tries)

        if multipart:
            data = aiohttp.FormData()
            for key, value in multipart.items():
                if key.startswith('file'):
                    data.add_field(key, value[1], filename=value[0],
                                   content_type=value[2])
                else:
                    data.add_field(key, value)

        async with self.session.request(verb, url, headers=headers, data=data) as r:
            log.debug('Webhook ID %s with %s %s has returned status code %s',
                      _id, verb, base_url, r.status)
            # Coerce empty strings to return None for hygiene purposes
            response = (await r.text(encoding='utf-8')) or None
            # BUGFIX: use .get() so a missing Content-Type header cannot
            # raise KeyError, and never feed None into json.loads().
            if response and r.headers.get('Content-Type') == 'application/json':
                response = json.loads(response)

            # check if we have rate limit header information
            remaining = r.headers.get('X-Ratelimit-Remaining')
            if remaining == '0' and r.status != 429:
                delta = utils._parse_ratelimit_header(r)
                log.debug('Webhook ID %s has been pre-emptively rate limited, waiting %.2f seconds', _id, delta)
                await asyncio.sleep(delta)

            if 300 > r.status >= 200:
                return response

            # we are being rate limited
            if r.status == 429:
                if not r.headers.get('Via'):
                    # Banned by Cloudflare more than likely.
                    # BUGFIX: raise with the parsed server *response*,
                    # not the outgoing request body (`data`).
                    raise HTTPException(r, response)

                retry_after = response['retry_after'] / 1000.0
                log.warning('Webhook ID %s is rate limited. Retrying in %.2f seconds', _id, retry_after)
                await asyncio.sleep(retry_after)
                continue

            # Transient server errors: back off and retry.
            if r.status in (500, 502):
                await asyncio.sleep(1 + tries * 2)
                continue

            if r.status == 403:
                raise Forbidden(r, response)
            elif r.status == 404:
                raise NotFound(r, response)
            else:
                raise HTTPException(r, response)

    # no more retries
    if r.status >= 500:
        raise DiscordServerError(r, response)
    raise HTTPException(r, response)
async def jjcsearch_async(self, def_lst: list, retry: int = 0) -> str:
    """Query the nomae.net arena database for counter teams to *def_lst*.

    Renders the result page to a numbered directory under ``output`` and
    returns a user-facing (Chinese) message string containing a link to
    it, or an error message on failure.  Retries up to two extra times
    when the server returns a non-JSON body.
    """
    headers = {
        'User-Agent': ('Mozilla/5.0 (Windows NT 10.0; Win64; x64) '
                       'AppleWebKit/537.36 (KHTML, like Gecko) '
                       'Chrome/78.0.3904.87 Safari/537.36'),
        'X-From': 'https://nomae.net/arenadb/',
        'Authority': 'nomae.net',
    }
    req = aiohttp.FormData()
    req.add_field('type', 'search')
    req.add_field('userid', 0)
    req.add_field('public', 1)
    for item in def_lst:
        req.add_field('def[]', item)
    req.add_field('page', 0)
    req.add_field('sort', 0)
    try:
        async with aiohttp.request(
                'POST',
                'https://nomae.net/princess_connect/public/_arenadb/receive.php',
                headers=headers,
                data=req) as resp:
            restxt = await resp.text()
    except aiohttp.ClientError as e:
        return '错误' + str(e)
    try:
        solution = json.loads(restxt)
    except json.JSONDecodeError:
        # The server occasionally returns a non-JSON body; retry a couple
        # of times before giving up.
        if retry >= 2:
            return '服务器错误,请稍后再试'
        return await self.jjcsearch_async(def_lst, retry + 1)
    if len(solution) == 0:
        return '没有找到公开的解法'
    page = await render_template(
        'jjc-solution.html',
        len=len,
        solution=solution,
        def_lst=def_lst,
        jpname2id=self.jpname2id,
        parse_team=_parse_team,
        public_base=self.setting["public_basepath"],
    )
    output_folder = os.path.join(self.setting['dirname'], 'output')
    # BUGFIX: picking the next numbered directory was racy — two
    # concurrent searches could compute the same number and the second
    # os.mkdir() would raise FileExistsError.  Probe forward until a free
    # slot is found.  (Also fixes the `output_foler` typo.)
    num = len(os.listdir(output_folder)) + 1
    while True:
        try:
            os.mkdir(os.path.join(output_folder, str(num)))
            break
        except FileExistsError:
            num += 1
    filename = 'solution-{}.html'.format(random.randint(0, 999))
    with open(os.path.join(output_folder, str(num), filename), 'w',
              encoding='utf-8') as f:
        f.write(page)
    addr = urljoin(
        self.setting['public_address'],
        '{}output/{}/{}'.format(self.setting['public_basepath'], num, filename))
    reply = '找到{}条解法:{}'.format(len(solution), addr)
    if self.setting['web_mode_hint']:
        reply += '\n\n如果连接无法打开,请仔细阅读教程中《链接无法打开》的说明'
    return reply
async def request(self, route, *, files=None, form=None, **kwargs):
    """Perform one Discord API request for the given ``route``.

    Handles per-bucket and global rate limits, retries transient
    failures up to five times, and returns the decoded response body
    (dict for JSON, str otherwise).  Raises the library's HTTP
    exception hierarchy on failure.
    """
    bucket = route.bucket
    method = route.method
    url = route.url

    # One lock per rate-limit bucket so concurrent requests to the same
    # bucket are serialised.
    lock = self._locks.get(bucket)
    if lock is None:
        lock = asyncio.Lock()
        if bucket is not None:
            self._locks[bucket] = lock

    # header creation
    headers = {
        'User-Agent': self.user_agent,
        'X-Ratelimit-Precision': 'millisecond',
    }

    if self.token is not None:
        headers[
            'Authorization'] = 'Bot ' + self.token if self.bot_token else self.token
    # some checking if it's a JSON request
    if 'json' in kwargs:
        headers['Content-Type'] = 'application/json'
        kwargs['data'] = utils.to_json(kwargs.pop('json'))

    # Optional audit-log reason, forwarded as a header.
    try:
        reason = kwargs.pop('reason')
    except KeyError:
        pass
    else:
        if reason:
            headers['X-Audit-Log-Reason'] = _uriquote(reason, safe='/ ')

    kwargs['headers'] = headers

    # Proxy support
    if self.proxy is not None:
        kwargs['proxy'] = self.proxy
    if self.proxy_auth is not None:
        kwargs['proxy_auth'] = self.proxy_auth

    if not self._global_over.is_set():
        # wait until the global lock is complete
        await self._global_over.wait()

    await lock.acquire()
    with MaybeUnlock(lock) as maybe_lock:
        for tries in range(5):
            # File objects must be rewound before each retry attempt.
            if files:
                for f in files:
                    f.reset(seek=tries)

            if form:
                form_data = aiohttp.FormData()
                for params in form:
                    form_data.add_field(**params)
                kwargs['data'] = form_data

            try:
                async with self.__session.request(method, url, **kwargs) as r:
                    log.debug('%s %s with %s has returned %s', method, url,
                              kwargs.get('data'), r.status)

                    # even errors have text involved in them so this is safe to call
                    data = await json_or_text(r)

                    # check if we have rate limit header information
                    remaining = r.headers.get('X-Ratelimit-Remaining')
                    if remaining == '0' and r.status != 429:
                        # we've depleted our current bucket
                        delta = utils._parse_ratelimit_header(
                            r, use_clock=self.use_clock)
                        log.debug(
                            'A rate limit bucket has been exhausted (bucket: %s, retry: %s).',
                            bucket, delta)
                        # Keep the bucket lock held until the window expires.
                        maybe_lock.defer()
                        self.loop.call_later(delta, lock.release)

                    # the request was successful so just return the text/json
                    if 300 > r.status >= 200:
                        log.debug('%s %s has received %s', method, url, data)
                        return data

                    # we are being rate limited
                    if r.status == 429:
                        if not r.headers.get('Via'):
                            # Banned by Cloudflare more than likely.
                            raise HTTPException(r, data)

                        fmt = 'We are being rate limited. Retrying in %.2f seconds. Handled under the bucket "%s"'

                        # sleep a bit
                        retry_after = data['retry_after'] / 1000.0
                        log.warning(fmt, retry_after, bucket)

                        # check if it's a global rate limit
                        is_global = data.get('global', False)
                        if is_global:
                            log.warning(
                                'Global rate limit has been hit. Retrying in %.2f seconds.',
                                retry_after)
                            self._global_over.clear()

                        await asyncio.sleep(retry_after)
                        log.debug(
                            'Done sleeping for the rate limit. Retrying...'
                        )

                        # release the global lock now that the
                        # global rate limit has passed
                        if is_global:
                            self._global_over.set()
                            log.debug('Global rate limit is now over.')

                        continue

                    # we've received a 500 or 502, unconditional retry
                    if r.status in {500, 502}:
                        await asyncio.sleep(1 + tries * 2)
                        continue

                    # the usual error cases
                    if r.status == 403:
                        raise Forbidden(r, data)
                    elif r.status == 404:
                        raise NotFound(r, data)
                    elif r.status == 503:
                        raise DiscordServerError(r, data)
                    else:
                        raise HTTPException(r, data)

            # This is handling exceptions from the request
            except OSError as e:
                # Connection reset by peer
                if tries < 4 and e.errno in (54, 10054):
                    continue
                raise

        # We've run out of retries, raise.
        if r.status >= 500:
            raise DiscordServerError(r, data)

        raise HTTPException(r, data)
async def saucenao(self, ctx, url: modding.URLConverter() = None):
    '''
        `>>saucenao <optional: either uploaded image or url>`
        Find the sauce of the image.
        If no argument is provided, wait 2 minutes for uploaded image.
    '''
    # Resolve the image URL: explicit argument > attachment on the
    # invoking message > a follow-up message within two minutes.
    if not url:
        if not ctx.message.attachments:
            msg = await ctx.send(
                "You want sauce of what? Post the dang url or upload the dang pic here."
            )
            try:
                message = await self.bot.wait_for(
                    "message",
                    check=lambda m: m.author.id == ctx.author.id and
                    (m.attachments or m.content),
                    timeout=120)
            except asyncio.TimeoutError:
                return await msg.edit(
                    content="That's it, I'm not waiting anymore.")
        else:
            message = ctx.message
        if message.attachments:
            url = URL(message.attachments[0].url)
        else:
            url = await modding.URLConverter().convert(ctx, message.content)
    await ctx.trigger_typing()
    # Build the saucenao.com search form (database=999 searches all DBs).
    payload = aiohttp.FormData()
    payload.add_field("file", b"", filename="",
                      content_type="application/octet-stream")
    payload.add_field("url", str(url))
    payload.add_field("frame", "1")
    payload.add_field("hide", "0")
    payload.add_field("database", "999")
    async with self.bot.session.post(
            "https://saucenao.com/search.php",
            headers={"User-Agent": request.USER_AGENT},
            data=payload) as response:
        bytes_ = await response.read()
    data = BS(bytes_.decode("utf-8"), "lxml")
    result = []
    hidden_result = []
    # Scrape each result card; "hidden" class marks low-similarity hits.
    for tag in data.find_all(
            lambda x: x.name == "div" and x.get("class") in
            [["result"], ["result", "hidden"]] and not x.get("id")):
        content = tag.find("td", class_="resulttablecontent")
        title_tag = content.find("div", class_="resulttitle")
        if title_tag:
            # Turn <br> into newlines so the first line is the title.
            for br in title_tag.find_all("br"):
                br.replace_with("\n")
            try:
                title = title_tag.get_text().strip().splitlines()[0]
            except IndexError:
                title = "no title"
        else:
            result_content = tag.find("div", class_="resultcontent")
            for br in result_content.find_all("br"):
                br.replace_with("\n")
            title = utils.get_element(
                result_content.get_text().strip().splitlines(), 0,
                default="No title")
        similarity = content.find("div", class_="resultsimilarityinfo").text
        content_url = content.find("a", class_="linkify")
        if not content_url:
            content_url = content.find("div", class_="resultmiscinfo").find("a")
        if content_url:
            r = {
                "title": title,
                "similarity": similarity,
                "url": content_url["href"]
            }
        else:
            r = {"title": title, "similarity": similarity, "url": ""}
        if "hidden" in tag["class"]:
            hidden_result.append(r)
        else:
            result.append(r)
    if result:
        embed = discord.Embed(
            title="Sauce found?",
            description="\n".join(
                (f"[{r['title']} ({r['similarity']})]({r['url']})"
                 for r in result)))
        embed.set_footer(text="Powered by https://saucenao.com")
        await ctx.send(embed=embed)
    else:
        msg = await ctx.send("No result found.")
        # Offer the low-similarity (hidden) hits behind a yes/no prompt.
        if hidden_result:
            sentences = {
                "initial": "Do you want to show low similarity results?"
            }
            result = await ctx.yes_no_prompt(sentences, delete_mode=True)
            if result:
                await msg.delete()
                embed = discord.Embed(
                    title="Low similarity results:",
                    description="\n".join(
                        (f"[{r['title']} ({r['similarity']})]({r['url']})"
                         for r in hidden_result)))
                embed.set_footer(text="Powered by https://saucenao.com")
                await ctx.send(embed=embed)
async def request(
    self,
    route: Route,
    session: aiohttp.ClientSession,
    *,
    payload: Optional[Dict[str, Any]] = None,
    multipart: Optional[List[Dict[str, Any]]] = None,
    files: Optional[List[File]] = None,
    reason: Optional[str] = None,
    auth_token: Optional[str] = None,
    params: Optional[Dict[str, Any]] = None,
) -> Any:
    """Execute a webhook request on ``session`` with retry and
    rate-limit handling.

    ``payload`` is JSON-encoded; ``multipart`` field dicts are expanded
    into form data; ``files`` are rewound before every attempt.  Returns
    the decoded response body.  Raises Forbidden / NotFound /
    HTTPException / DiscordServerError on failure.
    """
    headers: Dict[str, str] = {}
    files = files or []
    to_send: Optional[Union[str, aiohttp.FormData]] = None
    # Rate-limit bucket: one asyncio.Lock per (webhook id, token) pair.
    bucket = (route.webhook_id, route.webhook_token)
    try:
        lock = self._locks[bucket]
    except KeyError:
        self._locks[bucket] = lock = asyncio.Lock()

    if payload is not None:
        headers['Content-Type'] = 'application/json'
        to_send = utils.to_json(payload)

    if auth_token is not None:
        headers['Authorization'] = f'Bot {auth_token}'

    if reason is not None:
        headers['X-Audit-Log-Reason'] = urlquote(reason, safe='/ ')

    response: Optional[aiohttp.ClientResponse] = None
    data: Optional[Union[Dict[str, Any], str]] = None
    method = route.method
    url = route.url
    webhook_id = route.webhook_id

    async with AsyncDeferredLock(lock) as lock:
        for attempt in range(5):
            # File objects must be rewound before each retry attempt.
            for file in files:
                file.reset(seek=attempt)

            if multipart:
                form_data = aiohttp.FormData()
                for p in multipart:
                    form_data.add_field(**p)
                to_send = form_data

            try:
                async with session.request(
                        method, url, data=to_send, headers=headers,
                        params=params) as response:
                    log.debug(
                        'Webhook ID %s with %s %s has returned status code %s',
                        webhook_id,
                        method,
                        url,
                        response.status,
                    )
                    # Coerce empty bodies to None, decode JSON bodies.
                    data = (await response.text(encoding='utf-8')) or None
                    if data and response.headers['Content-Type'] == 'application/json':
                        data = json.loads(data)

                    # Pre-emptively wait out a depleted bucket.
                    remaining = response.headers.get('X-Ratelimit-Remaining')
                    if remaining == '0' and response.status != 429:
                        delta = utils._parse_ratelimit_header(response)
                        log.debug(
                            'Webhook ID %s has been pre-emptively rate limited, waiting %.2f seconds',
                            webhook_id, delta
                        )
                        lock.delay_by(delta)

                    if 300 > response.status >= 200:
                        return data

                    if response.status == 429:
                        # No 'Via' header: banned by Cloudflare, don't retry.
                        if not response.headers.get('Via'):
                            raise HTTPException(response, data)

                        retry_after: float = data['retry_after']  # type: ignore
                        log.warning('Webhook ID %s is rate limited. Retrying in %.2f seconds', webhook_id, retry_after)
                        await asyncio.sleep(retry_after)
                        continue

                    # Server errors: back off and retry.
                    if response.status >= 500:
                        await asyncio.sleep(1 + attempt * 2)
                        continue

                    if response.status == 403:
                        raise Forbidden(response, data)
                    elif response.status == 404:
                        raise NotFound(response, data)
                    else:
                        raise HTTPException(response, data)

            except OSError as e:
                # Connection reset by peer
                if attempt < 4 and e.errno in (54, 10054):
                    await asyncio.sleep(1 + attempt * 2)
                    continue
                raise

    # We've run out of retries.
    if response:
        if response.status >= 500:
            raise DiscordServerError(response, data)
        raise HTTPException(response, data)

    raise RuntimeError('Unreachable code in HTTP handling.')
async def s3_obj_worker(name, queue, session, bucket, context, chunk_size,
                        no_results):
    """Worker coroutine: mask S3 objects through the DarkShield API.

    Pulls objects off ``queue`` forever, streams each to the
    ``fileSearchContext.mask`` endpoint, and uploads the returned parts
    back to ``bucket`` under ``darkshield-masked/`` (masked file) and
    ``darkshield-results/`` (JSON results, unless ``no_results``).

    NOTE(review): relies on a module-level ``host`` global for the API
    base URL — confirm it is defined before workers start.
    """
    url = f'{host}/files/fileSearchContext.mask'
    while True:
        obj = await queue.get()
        logging.info('%s: Starting task...', name)
        file_name = obj.key
        await obj.load()  # Load the metadata for this object.
        content_type = obj.meta.data.get('ContentType',
                                         'application/octet-stream')
        logging.info('%s: Processing "%s"...', name, file_name)
        logging.info('%s: Content type: %s', name, content_type)
        # Skip directories and our own previously-produced outputs.
        if content_type.startswith('application/x-directory')\
                or file_name.startswith('darkshield-masked')\
                or file_name.startswith('darkshield-results'):
            logging.info('%s: Skipping "%s"...', name, file_name)
        else:
            data = aiohttp.FormData()
            data.add_field('context', context, filename='context',
                           content_type='application/json')
            # Stream the object body instead of buffering it in memory.
            data.add_field('file', s3_object_sender(obj, chunk_size),
                           filename=file_name, content_type=content_type)
            logging.info('%s: Sending request to API...', name)
            async with session.post(url, data=data) as r:
                if r.status != 200:
                    logging.error('%s: Failed to mask with error code %d: %s',
                                  name, r.status, await r.content.read())
                else:
                    logging.info('%s: Processing response...', name)
                    # The API replies with a multipart body: a 'file' part
                    # (masked content) and a 'results' part (JSON report).
                    reader = aiohttp.MultipartReader.from_response(r)
                    part = await reader.next()
                    while part is not None:
                        if part.name == 'file':
                            target = f'darkshield-masked/{file_name}'
                            logging.info('%s: Uploading to "%s"...', name,
                                         target)
                            config = TransferConfig(
                                multipart_threshold=chunk_size)
                            await bucket.upload_fileobj(PartReader(part),
                                                        target, Config=config)
                        elif part.name == 'results' and not no_results:
                            # NOTE(review): this rebinds file_name, so later
                            # log lines (and a 'file' part arriving after
                            # 'results') would see the mangled name — confirm
                            # part ordering is always file-then-results.
                            file_name = file_name.replace('.', '_')
                            target = f'darkshield-results/{file_name}-results.json'
                            logging.info('%s: Uploading to "%s"...', name,
                                         target)
                            config = TransferConfig(
                                multipart_threshold=chunk_size)
                            await bucket.upload_fileobj(PartReader(part),
                                                        target, Config=config)
                        part = await reader.next()
            logging.info('%s: Processed "%s".', name, file_name)
        queue.task_done()
        logging.info('%s: Task completed.', name)
async def run(args, i):
    """One load-test iteration against the workshop notebook service.

    Logs in as the workshop guest, creates a notebook, waits (via
    websocket) for it to become ready, then deletes it.  ``i`` is only
    used to tag log lines.  Returns ``(duration, ready)`` where
    ``duration`` is the create-to-ready wall time in seconds and
    ``ready`` reports whether the notebook ever became ready.
    """
    headers = service_auth_headers(deploy_config, 'workshop',
                                   authorize_target=False)
    async with client_session() as session:

        # make sure notebook is up
        async with session.get(deploy_config.url('workshop', ''),
                               headers=headers) as resp:
            await resp.text()

        log.info(f'{i} loaded notebook home page')

        # log in as workshop guest
        # get csrf token
        async with session.get(deploy_config.url('workshop', '/login'),
                               headers=headers) as resp:
            pass

        data = aiohttp.FormData()
        data.add_field(name='name', value=args.workshop)
        data.add_field(name='password', value=args.password)
        data.add_field(name='_csrf', value=get_cookie(session, '_csrf'))
        async with session.post(deploy_config.url('workshop', '/login'),
                                data=data, headers=headers) as resp:
            pass

        log.info(f'{i} logged in')

        # create notebook
        # get csrf token
        async with session.get(deploy_config.url('workshop', '/notebook'),
                               headers=headers) as resp:
            pass

        data = aiohttp.FormData()
        data.add_field(name='_csrf', value=get_cookie(session, '_csrf'))
        async with session.post(deploy_config.url('workshop', '/notebook'),
                                data=data, headers=headers) as resp:
            pass

        log.info(f'{i} created notebook')

        start = time.time()

        # wait for notebook ready
        ready = False
        attempt = 0
        # 5 attempts overkill, should only take 2: Scheduling => Running => Ready
        while not ready and attempt < 5:
            async with session.ws_connect(
                    deploy_config.url('workshop', '/notebook/wait',
                                      base_scheme='ws'),
                    headers=headers) as ws:
                async for msg in ws:
                    # The service sends '1' once the notebook is ready.
                    if msg.data == '1':
                        ready = True
            attempt += 1

        end = time.time()
        duration = end - start

        log.info(f'{i} notebook state {ready} duration {duration}')

        # delete notebook
        # get csrf token
        async with session.get(deploy_config.url('workshop', '/notebook'),
                               headers=headers) as resp:
            pass

        data = aiohttp.FormData()
        data.add_field(name='_csrf', value=get_cookie(session, '_csrf'))
        async with session.post(deploy_config.url('workshop', '/notebook/delete'),
                                data=data, headers=headers) as resp:
            pass

        log.info(f'{i} notebook delete, done.')

    return duration, ready
async def test_single_account(self, account):
    """Check whether one Xunlei account can log in and fetch VIP info.

    ``account`` is a JSON string (or UTF-8 bytes) with ``username`` and
    ``password`` keys.  Returns ``(0, account)`` when the account is
    usable, ``(1, account)`` otherwise (including any network error).
    """
    if isinstance(account, bytes):
        account = str(account, 'utf-8')
    account_json = json.loads(account)
    username = account_json['username']
    password = account_json['password']

    # proxy for the outgoing requests
    proxy = await ProxyPool.get_proxy()
    # proxy = None

    params = {
        'appid': '22003',
        'appName': 'WEB-i.xunlei.com',
        'deviceModel': 'chrome/70.0.3538.77',
        'deviceName': 'PC-Chrome',
        'OSVersion': 'Linux x86_64',
        'provideNname': 'NONE',
        'netWorkType': 'NONE',
        'providerName': 'NONE',
        'sdkVersion': 'v3.5.0',
        'clientVersion': '1.1.1',
        'protocolVersion': '301',
        # fixed device fingerprint
        'devicesign': 'wdi10.0ae86ea19ff457567ceb4b707cc0c0ff44e8bc57e630ebdf7efa1a5b92b1bb1f',
        'platformVersion': '1',
        'fromPlatformVersion': '1',
        'format': 'cookie',
        # 'timestamp': '1542182400516',
        'userName': username,
        'passWord': password,
        'isMd5Pwd': '0',
        # 'creditkey': '...'  # optional, may be omitted
    }
    content_type, body = self.encode_multipart_formdata(params)
    self.headers = {
        'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.84 Safari/537.36',
        'Content-Type': content_type,
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
        'Accept-Language': 'zh-CN,zh;q=0.9',
        'Cache-Control': 'max-age=0',
        'Connection': 'keep-alive',
        'Upgrade-Insecure-Requests': '1',
    }
    self.headers2 = {
        'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.84 Safari/537.36',
    }
    data = aiohttp.FormData()
    data.add_field('file', body, content_type=content_type)
    try:
        async with aiohttp.ClientSession() as client:
            # login
            url = 'https://login.xunlei.com/xluser.core.login/v3/login'
            async with client.post(url, data=data, headers=self.headers,
                                   proxy=proxy, timeout=30) as rsp:
                deviceid = rsp.cookies['deviceid'].value[0:32]
                csrf_token = SecretUtils.md5(deviceid)

            # GetAccInfo
            # BUGFIX: the URL scheme was corrupted ("15615428418ttps://...");
            # restored to a valid https:// URL.
            url = ("https://xluser-ssl.xunlei.com/xlcenter/v1/GetAccInfo?"
                   f"csrf_token={csrf_token}")
            async with client.get(url, headers=self.headers2, proxy=proxy,
                                  timeout=30) as rsp:
                print(rsp.status)
                acc_info = await rsp.json()
                print(acc_info)

            # GetAllVipInfo
            vip_url = ("https://xluser-ssl.xunlei.com/xlcenter/v1/"
                       "GetAllVipInfo?"
                       f"csrf_token={csrf_token}")
            async with client.get(vip_url, headers=self.headers2, proxy=proxy,
                                  timeout=30) as rsp:
                vip_info = await rsp.json()
                print(vip_info)
    except Exception as e:
        print(str(e))
        return (1, account)

    # decide whether the account is usable
    if vip_info.get('code', 0) == 200:
        print("成功")
        return (0, account)
    else:
        return (1, account)
async def proxy(self, request: aiohttp.web.Request):
    """Handles each proxy request: scan for spam, then forward or block.

    Scans POST payloads against the spam rules; known offenders and
    requests with hits receive a 403 with ``block_msg``, everything else
    is proxied to ``self.proxy_url`` and the backend response is relayed
    (buffered when Content-Length is known, chunked otherwise).
    """
    request_url = "/" + request.match_info["path"]
    now = time.time()
    target_url = urllib.parse.urljoin(self.proxy_url, request_url)
    # Prefer the configured client-IP header (e.g. X-Forwarded-For)
    # over the socket peer address.
    if self.ipheader:
        remote_ip = request.headers.get(self.ipheader, request.remote)
    else:
        remote_ip = request.remote
    if self.debug:
        print(f"Proxying request to {target_url}..."
              )  # This can get spammy, default is to not show it.

    # Management endpoint: remove an IP from the offender list, gated on
    # a shared secret header.
    if request.path == '/aardvark-unblock':
        ip = request.query_string
        theiruid = request.headers.get('X-Aardvark-Key', '')
        if theiruid == self.myuid:
            if ip in self.offenders:
                self.offenders.remove(ip)
                print(f"Removed IP {ip} from block list.")
                return aiohttp.web.Response(text="Block removed", status=200)
        return aiohttp.web.Response(text="No such block", status=404)

    # Debug output for syslog
    self.last_batches.append(time.time())
    if len(self.last_batches) >= 25000:
        diff = self.last_batches[-1] - self.last_batches[0]
        diff += 0.01
        self.last_batches = []
        print("Last 25k anti spam scans done at %.2f req/sec" %
              (25000 / diff))
        if self.processing_times:
            avg = sum(self.processing_times) / len(self.processing_times)
            self.processing_times = []
            print("Average request proxy response time is %.2f ms" %
                  (avg * 1000.0))
        if self.scan_times:
            avg = sum(self.scan_times) / len(self.scan_times)
            self.scan_times = []
            print("Average request scan time is %.2f ms" % (avg * 1000.0))

    # Read POST data and query string
    post_dict = await request.post(
    )  # Request data as key/value pairs if applicable
    post_data = None
    if not post_dict:
        post_data = await request.read(
        )  # Request data as a blob if not valid form data
    get_data = request.rel_url.query

    # Perform scan!
    bad_items = []

    # Check if offender is in out registry already
    known_offender = False
    if remote_ip in self.offenders:
        bad_items.append("Client is on the list of bad offenders.")
        known_offender = True
    else:
        bad_items = []
        if post_data:
            bad_items.extend(self.scan_simple(request_url, post_data))
        elif post_dict:
            bad_items.extend(self.scan_dict(post_dict))

    # If this URL is actually to be ignored, forget all we just did!
    if bad_items:
        for iu in self.ignoreurls:
            if iu in request_url:
                print(
                    f"Spam was detected from {remote_ip} but URL '{request_url} is on ignore list, so..."
                )
                bad_items = []
                break

    if bad_items:
        if self.debug or not (known_offender and self.suppress_repeats):
            print(
                f"Request from {remote_ip} to '{request_url}' contains possible spam:"
            )
            for item in bad_items:
                print(f"[{remote_ip}]: {item}")
        if not known_offender:  # Only save request data for new cases
            await self.save_request_data(request, remote_ip,
                                         post_dict or post_data)

    # Done with scan, log how long that took
    self.scan_times.append(time.time() - now)

    # If bad items were found, don't proxy, return empty response
    if bad_items:
        self.offenders.add(remote_ip)
        self.processing_times.append(time.time() - now)
        return aiohttp.web.Response(text=self.block_msg, status=403)

    async with aiohttp.ClientSession(auto_decompress=False) as session:
        try:
            req_headers = request.headers.copy()
            # We have to replicate the form data or we mess up file transfers
            form_data = None
            if post_dict:
                form_data = aiohttp.FormData()
                # Drop length/type headers; aiohttp recomputes them for
                # the rebuilt multipart body.
                if "content-length" in req_headers:
                    del req_headers["content-length"]
                if "content-type" in req_headers:
                    del req_headers["content-type"]
                for k, v in post_dict.items():
                    if isinstance(
                            v, aiohttp.web.FileField
                    ):  # This sets multipart properly in the request
                        form_data.add_field(name=v.name,
                                            filename=v.filename,
                                            value=v.file.raw,
                                            content_type=v.content_type)
                    else:
                        form_data.add_field(name=k, value=v)
            async with session.request(
                    request.method,
                    target_url,
                    headers=req_headers,
                    params=get_data,
                    data=form_data or post_data,
                    timeout=30,
                    allow_redirects=False,
            ) as resp:
                result = resp
                headers = result.headers.copy()
                if "server" not in headers:
                    headers["server"] = "JIRA (via Aardvark)"
                self.processing_times.append(time.time() - now)
                # Standard response
                if 'content-length' in headers:
                    raw = await result.read()
                    response = aiohttp.web.Response(body=raw,
                                                    status=result.status,
                                                    headers=headers)
                # Chunked response
                else:
                    response = aiohttp.web.StreamResponse(
                        status=result.status, headers=headers)
                    response.enable_chunked_encoding()
                    await response.prepare(request)
                    buffer = b""
                    async for data, end_of_http_chunk in result.content.iter_chunks(
                    ):
                        buffer += data
                        if end_of_http_chunk:
                            async with self.lock:
                                await asyncio.wait_for(
                                    response.write(buffer), timeout=5)
                            buffer = b""
                    # Flush the tail and signal EOF.
                    async with self.lock:
                        await asyncio.wait_for(response.write(buffer),
                                               timeout=5)
                        await asyncio.wait_for(response.write(b""), timeout=5)
                return response
        except aiohttp.client_exceptions.ClientConnectorError as e:
            print("Could not connect to backend: " + str(e))
            self.processing_times.append(time.time() - now)
            # NOTE(review): processing time is appended twice on this
            # path — looks like an accidental duplicate; confirm and
            # remove one.
            self.processing_times.append(time.time() - now)
            return aiohttp.web.Response(text=self.block_msg, status=403)
def api_call(
    self,
    api_method: str,
    *,
    http_verb: str = "POST",
    files: dict = None,
    data: dict = None,
    params: dict = None,
    json: dict = None,
):
    """Create a request and execute the API call to Slack.

    Args:
        api_method (str): The target Slack API method.
            e.g. 'chat.postMessage'
        http_verb (str): HTTP Verb. e.g. 'POST'
        files (dict): Files to multipart upload.
            e.g. {imageORfile: file_objectORfile_path}
        data: The body to attach to the request. If a dictionary is
            provided, form-encoding will take place.
            e.g. {'key1': 'value1', 'key2': 'value2'}
        params (dict): The URL parameters to append to the URL.
            e.g. {'key1': 'value1', 'key2': 'value2'}
        json (dict): JSON for the body to attach to the request
            (if files or data is not specified).
            e.g. {'key1': 'value1', 'key2': 'value2'}

    Returns:
        (SlackResponse)
            The server's response to an HTTP request. Data
            from the response can be accessed like a dict.
            If the response included 'next_cursor' it can
            be iterated on to execute subsequent requests.

    Raises:
        SlackApiError: The following Slack API call failed:
            'chat.postMessage'.
        SlackRequestError: Json data can only be submitted as
            POST requests.
    """
    # JSON bodies are only meaningful on POST; reject anything else early.
    if json is not None and http_verb != "POST":
        msg = "Json data can only be submitted as POST requests. GET requests should use the 'params' argument."
        raise err.SlackRequestError(msg)

    api_url = self._get_url(api_method)
    headers = {
        "User-Agent": self._get_user_agent(),
        "Authorization": "Bearer {}".format(self.token),
    }

    # When uploading files, fold both `files` and `data` into one
    # multipart form body.
    if files is not None:
        form_data = aiohttp.FormData()
        for k, v in files.items():
            if isinstance(v, str):
                # NOTE(review): this file handle is never explicitly
                # closed — it stays open until garbage-collected.
                form_data.add_field(k, open(v, "rb"))
            else:
                form_data.add_field(k, v)
        if data is not None:
            for k, v in data.items():
                form_data.add_field(k, str(v))
        data = form_data

    req_args = {
        "headers": headers,
        "data": data,
        "params": params,
        "json": json,
        "ssl": self.ssl,
        "proxy": self.proxy,
    }

    if self._event_loop is None:
        self._set_event_loop()

    future = asyncio.ensure_future(
        self._send(http_verb=http_verb, api_url=api_url, req_args=req_args),
        loop=self._event_loop,
    )

    # Return the pending future in async mode (or when a loop is already
    # running); otherwise block until the call completes.
    if self.run_async or self._event_loop.is_running():
        return future

    return self._event_loop.run_until_complete(future)
async def request(self, route, converter=None, wait=True, files=None, **kwargs):
    """Perform an API request for ``route`` with rate-limit handling.

    Lazily creates the aiohttp session (optionally bound to the
    ``BIND_INTERFACE`` local address), attaches any ``files`` plus a
    ``payload_json`` field when a ``json`` kwarg is given, waits on the
    route's rate-limit bucket (or raises ``HTTPTooManyRequests`` when
    ``wait`` is False), and retries up to ``self.max_retries`` times on
    429/5xx.  The result is passed through ``converter`` when provided.
    """
    if self._session is None:
        bind_to = env.get("BIND_INTERFACE")
        if bind_to is not None:
            connector = aiohttp.TCPConnector(local_addr=(bind_to, 0))
        else:
            connector = aiohttp.TCPConnector()
        self._session = aiohttp.ClientSession(loop=self.loop,
                                              connector=connector)

    for tries in range(self.max_retries):
        options = kwargs.copy()
        try:
            if files is not None:
                # Rewind file objects in case this is a retry.
                for file in files:
                    file.reset()

                data = options.get("data", aiohttp.FormData())
                if "json" in options:
                    data.add_field(
                        "payload_json",
                        orjson.dumps(options.pop("json")).decode("utf-8"))
                # BUGFIX: the inner loop previously reused `i` as its
                # index, shadowing the retry counter and corrupting the
                # back-off sleeps in the except-branch below.
                for idx, file in enumerate(files):
                    data.add_field(f"file{idx}",
                                   file.fp,
                                   filename=file.filename,
                                   content_type='application/octet-stream')
                options["data"] = data

            ratelimit = await self.get_bucket(route.bucket)
            if ratelimit:
                if wait:
                    await ratelimit.wait()
                else:
                    raise HTTPTooManyRequests("Bucket depleted")

            # Bound overall concurrency with the shared semaphore.
            async with self.semaphore:
                result = await self._perform_request(route, **options)

            if converter:
                return converter(result)
            return result
        except HTTPException as e:
            # Translate generic HTTP failures into specific exceptions;
            # sleep-and-retry on 429 (when waiting is allowed) and 5xx.
            if e.status == 400:
                raise HTTPBadRequest(e.text)
            elif e.status == 401:
                raise HTTPUnauthorized(e.text)
            elif e.status == 403:
                raise HTTPForbidden(e.text)
            elif e.status == 404:
                raise HTTPNotFound(e.text)
            elif e.status == 429:
                if not wait:
                    raise HTTPTooManyRequests(e.text)
                await asyncio.sleep(tries)
            elif e.status < 500 or tries == self.max_retries - 1:
                raise
            else:
                await asyncio.sleep(tries)
def _build_form_payload(req_args: dict) -> Tuple[str, aiohttp.FormData]:
    """Convert *req_args* into an aiohttp multipart form payload.

    Returns the aiohttp request keyword name (``'data'``) paired with a
    ``FormData`` instance containing one field per key/value of *req_args*.
    """
    form = aiohttp.FormData()
    for field_name, field_value in req_args.items():
        form.add_field(field_name, field_value)
    return 'data', form
async def edit_image(self, ctx: context.Context, url: str, edit_type: str, **kwargs):
    """Fetch an image, apply *edit_type* in a child process, upload the result
    and return a Discord embed showing it.

    Image source priority: message attachment, then *url*, then the author's
    avatar. The edit runs in a separate process (``do_edit_image``) and the
    result is uploaded to idevision's media endpoint.

    Raises:
        ArgumentError: if the image could not be fetched.
        ImageError: for disallowed formats, oversized files (>15 MB download
            / >20 MB upload), or processing failures.
    """
    # Prefer an attachment; fall back to the author's avatar when no url given.
    if ctx.message.attachments:
        url = ctx.message.attachments[0].url
    if url is None:
        url = str(
            ctx.author.avatar_url_as(
                format='gif' if ctx.author.is_avatar_animated() is True else 'png'))

    try:
        async with self.bot.session.get(url) as response:
            image_bytes = await response.read()
    except Exception:
        raise ArgumentError(
            f'Something went wrong while trying to get that image. Check the url.'
        )
    else:
        # Validate content type and size from the response headers.
        content_type = response.headers.get('Content-Type')
        content_length = response.headers.get('Content-Length')
        if content_type not in [
                'image/png', 'image/gif', 'image/jpeg', 'image/webp'
        ]:
            raise ImageError(
                'That file format is not allowed, only png, gif, jpg and webp are allowed.'
            )
        # 15728640 bytes == 15 MB.
        if content_length and int(content_length) > 15728640:
            raise ImageError('That file is over 15mb.')

    # Run the edit in a daemon subprocess; results come back over a pipe.
    parent_pipe, child_pipe = multiprocessing.Pipe()
    args = (image_operations[edit_type], image_bytes, child_pipe)
    process = multiprocessing.Process(target=do_edit_image,
                                      kwargs=kwargs,
                                      daemon=True,
                                      args=args)
    process.start()

    # recv() blocks, so run it in an executor to keep the event loop free.
    data = await self.bot.loop.run_in_executor(None, parent_pipe.recv)
    if isinstance(data, ImageError):
        process.terminate()
        raise ImageError(
            'Something went wrong while trying to process that image.')
    process.join()
    process.close()

    image = data['image']
    image_format = data['format']
    image_text = data['text']

    # Upload the edited image to idevision.
    url = 'https://idevision.net/api/media/post'
    headers = {"Authorization": self.bot.config.idevision_key}
    upload_data = aiohttp.FormData()
    upload_data.add_field('file',
                          image,
                          filename=f'image.{image_format.lower()}')
    async with self.bot.session.post(url, data=upload_data,
                                     headers=headers) as response:
        if response.status == 413:
            raise ImageError('The image produced was over 20mb.')
        post = await response.json()

    embed = discord.Embed(colour=ctx.colour)
    embed.set_footer(text=image_text)
    embed.set_image(url=post.get('sike_heres_the_real_url'))
    return embed
async def run(self, ctx: commands.Context, lang, *, code: str):
    '''
    Runs C++, Python, Ruby, Rust, Haskell, Javascript or Shell source code.
    Like the Twitter shell-art bot, uploaded files are placed under /media,
    and images saved to /images are sent back.
    '''
    # Normalize the language alias to the API's canonical name.
    if lang in ['py', 'python']:
        l = 'python'
    elif lang in ['c', 'c++', 'cpp']:
        l = 'cpp'
    elif lang in ['js', 'javascript']:
        l = 'javascript'
    elif lang in ['hs', 'haskell']:
        l = 'haskell'
    elif lang in ['rb', 'ruby']:
        l = 'ruby'
    elif lang in ['rs', 'rust']:
        l = 'rust'
    elif lang in ['sh', 'bash', 'zsh', 'shell']:
        l = 'shell'
    else:
        return await self.bot.send_error(
            ctx, '引数が不正です!', '第一引数には言語名を入れてください!\n\n' + '対応言語: ' + ','.join([
                'python', 'c++', 'javascript', 'ruby', 'haskell', 'shell'
            ]))

    # Extract the body of a ``` code fence, if present.
    ptn: Pattern[str] = re.compile(r'```(.*)?\n((.|\s)*)?```')
    codes = ptn.findall(code)
    if len(codes) == 1:
        _code = codes[0]
        if len(_code) == 3:
            code = _code[1]
    # Strip single-backtick inline-code wrapping.
    if code[0] == '`' and code[-1] == '`':
        code = code[1:-1]

    # Build the multipart request: source, language, up to 4 attachments.
    form = aiohttp.FormData()
    form.add_field('source', code)
    form.add_field('language', l)
    if ctx.message.attachments:
        fs = ctx.message.attachments[:4]
        for i, f in enumerate(fs):
            data = await f.read()
            form.add_field(f"f{i}", data)

    async with aiohttp.ClientSession() as session:
        async with session.post('https://bash.chomama.jp/api/run',
                                data=form) as resp:
            data = await resp.json()

    # Report stdout, truncated to 20 lines / 500 characters.
    if len(data["stdout"]) > 0:
        if len(data["stdout"].split('\n')) > 20:
            data["stdout"] = "\n".join(
                data["stdout"].split('\n')[:20]) + "\n..."
        if len(data["stdout"]) > 500:
            data["stdout"] = data["stdout"][:500] + "..."
        await ctx.message.add_reaction('✅')
        await ctx.send(f'**標準出力**\n```\n{data["stdout"]}\n```')

    # Report stderr with the same truncation rules.
    if len(data["stderr"]) > 0:
        if len(data["stderr"].split('\n')) > 20:
            data["stderr"] = "\n".join(
                data["stderr"].split('\n')[:20]) + "\n..."
        if len(data["stderr"]) > 500:
            data["stderr"] = data["stderr"][:500] + "..."
        await ctx.message.add_reaction('⚠')
        await ctx.send(f'**標準エラー出力**\n```\n{data["stderr"]}\n```')

    # Download any produced images and re-send them as Discord files.
    if len(data["images"]) > 0:
        await ctx.message.add_reaction('🖼')
        files = []
        for url in data["images"]:
            async with aiohttp.ClientSession() as session:
                async with session.get(
                        f"https://bash.chomama.jp{url}") as resp:
                    if resp.status != 200:
                        continue
                    fn = url.split("/")[-1]
                    g = io.BytesIO(await resp.read())
                    files.append(discord.File(g, fn))
        await ctx.send(files=files)

    await ctx.send(
        f"`終了コード: {data['exit_code']}`\n`実行時間: {data['exec_sec']}`")
#Check the execution state every 2 seconds while not flag: #Get the current state state = json.loads(asyncio.run(ws_client.getState( token)))['webservice']['state'] # get the WebService current state if (state != "complete" and state != "error"): print(state) asyncio.run(delay(2)) #If test is error elif state == "error": flag = True #If test is complete else: print(state) froamData = aiohttp.FormData() froamData.add_field( 'token', token, content_type='application/x-www-form-urlencoded') froamData.add_field( 'file', "reports.zip", content_type='application/x-www-form-urlencoded') #download the test report to the target file path asyncio.run(ws_client.download(froamData, "./reports.zip", 0)) #Download the logs froamData = aiohttp.FormData() froamData.add_field( 'token',
async def _async_request( self, method: str = "POST", payload: dict = None, file: Optional[File] = None, headers: dict = None, ) -> Optional["Webhook"]: """ Async version of the request function using aiohttp. """ # type annotation support for Python 3.5 self.session = self.session # type: aiohttp.ClientSession if payload is None: payload = {} if headers is None: headers = {} rate_limited = True resp = None while rate_limited: if method == "POST": if file is not None: data = aiohttp.FormData() data.add_field("file", file.fp, filename=file.name) data.add_field("payload_json", json.dumps(payload)) resp = await self.session.post(self.url, data=data, headers=headers) else: headers["Content-Type"] = "application/json" resp = await self.session.post(self.url, json=payload, headers=headers) elif method == "DELETE": resp = await self.session.delete(self.url, headers=headers) elif method == "PATCH": resp = await self.session.patch(self.url, json=payload, headers=headers) elif method == "GET": resp = await self.session.get(self.url, headers=headers) else: raise ValueError("Bad method: {}".format(method)) if resp.status == 429: # Too many request await asyncio.sleep( (await resp.json())["retry_after"] / 1000.0) if file is not None: file.seek() continue else: if file is not None: file.close() rate_limited = False if resp.status == 204: # method DELETE return resp.raise_for_status() self._update_fields(await resp.json()) return self
async def execute(
    self,
    document: DocumentNode,
    variable_values: Optional[Dict[str, Any]] = None,
    operation_name: Optional[str] = None,
    extra_args: Dict[str, Any] = None,
    upload_files: bool = False,
) -> ExecutionResult:
    """Execute the provided document AST against the configured remote server
    using the current session.
    This uses the aiohttp library to perform a HTTP POST request asynchronously
    to the remote server.

    Don't call this coroutine directly on the transport, instead use
    :code:`execute` on a client or a session.

    :param document: the parsed GraphQL request
    :param variable_values: An optional Dict of variable values
    :param operation_name: An optional Operation name for the request
    :param extra_args: additional arguments to send to the aiohttp post method
    :param upload_files: Set to True if you want to put files in the variable values
    :returns: an ExecutionResult object.
    """
    query_str = print_ast(document)
    payload: Dict[str, Any] = {
        "query": query_str,
    }
    if operation_name:
        payload["operationName"] = operation_name

    if upload_files:
        # If the upload_files flag is set, then we need variable_values
        assert variable_values is not None

        # If we upload files, we will extract the files present in the
        # variable_values dict and replace them by null values
        nulled_variable_values, files = extract_files(
            variables=variable_values,
            file_classes=self.file_classes,
        )

        # Save the nulled variable values in the payload
        payload["variables"] = nulled_variable_values

        # Prepare aiohttp to send multipart-encoded data
        data = aiohttp.FormData()

        # Generate the file map
        # path is nested in a list because the spec allows multiple pointers
        # to the same file. But we don't support that.
        # Will generate something like {"0": ["variables.file"]}
        file_map = {str(i): [path] for i, path in enumerate(files)}

        # Enumerate the file streams
        # Will generate something like {'0': <_io.BufferedReader ...>}
        file_streams = {
            str(i): files[path]
            for i, path in enumerate(files)
        }

        # Add the payload to the operations field
        operations_str = json.dumps(payload)
        log.debug("operations %s", operations_str)
        data.add_field("operations",
                       operations_str,
                       content_type="application/json")

        # Add the file map field
        file_map_str = json.dumps(file_map)
        log.debug("file_map %s", file_map_str)
        data.add_field("map", file_map_str, content_type="application/json")

        # Add the extracted files as remaining fields
        for k, v in file_streams.items():
            data.add_field(k, v, filename=getattr(v, "name", k))

        post_args: Dict[str, Any] = {"data": data}
    else:
        # Plain JSON request (no file upload).
        if variable_values:
            payload["variables"] = variable_values
        if log.isEnabledFor(logging.INFO):
            log.info(">>> %s", json.dumps(payload))
        post_args = {"json": payload}

    # Pass post_args to aiohttp post method
    if extra_args:
        post_args.update(extra_args)

    # Add headers for AppSync if requested
    if isinstance(self.auth, AppSyncAuthentication):
        post_args["headers"] = self.auth.get_headers(
            json.dumps(payload),
            {"content-type": "application/json"},
        )

    if self.session is None:
        raise TransportClosed("Transport is not connected")

    async with self.session.post(self.url, ssl=self.ssl, **post_args) as resp:

        async def raise_response_error(resp: aiohttp.ClientResponse,
                                       reason: str):
            # We raise a TransportServerError if the status code is 400 or higher
            # We raise a TransportProtocolError in the other cases
            try:
                # Raise a ClientResponseError if response status is 400 or higher
                resp.raise_for_status()
            except ClientResponseError as e:
                raise TransportServerError(str(e), e.status) from e
            result_text = await resp.text()
            raise TransportProtocolError(
                f"Server did not return a GraphQL result: "
                f"{reason}: "
                f"{result_text}")

        try:
            # content_type=None disables aiohttp's content-type check so
            # non-standard server content types still parse as JSON.
            result = await resp.json(content_type=None)
            if log.isEnabledFor(logging.INFO):
                result_text = await resp.text()
                log.info("<<< %s", result_text)
        except Exception:
            await raise_response_error(resp, "Not a JSON answer")

        if "errors" not in result and "data" not in result:
            await raise_response_error(
                resp, 'No "data" or "errors" keys in answer')

        # Saving latest response headers in the transport
        self.response_headers = resp.headers

        return ExecutionResult(
            errors=result.get("errors"),
            data=result.get("data"),
            extensions=result.get("extensions"),
        )
async def send_data(
    sess, client_id, split_set, pair_id, epoch, iteration, ldr_image,
    hdr_image, seg_map, seg_vis, normal_image, params, *, host, port,
    protocol='https', login='******', password='******'
):
    """Submit one sample (LDR/HDR images, segmentation maps, normals and
    metadata) to the collection server as a multipart POST.

    Args:
        sess: an aiohttp ClientSession used for the POST.
        client_id/split_set/pair_id/epoch/iteration: identifying metadata.
        ldr_image/hdr_image/seg_map/seg_vis/normal_image: image arrays;
            the HDR image is serialized to EXR via a temporary file.
        params: JSON-serializable parameter dict.
        host/port/protocol/login/password: server endpoint and basic auth.
    """
    metadata = {
        'split_set': split_set,
        'client_id': client_id,
        'pair_id': pair_id,
        'epoch': epoch,
        'iteration': iteration,
    }
    auth = aiohttp.BasicAuth(login=login, password=password)

    # Serialize the HDR image through a temp file and read it back as bytes
    # inside the context.  The original re-opened the file by name and never
    # closed that handle (leak; also breaks on Windows, where a
    # NamedTemporaryFile cannot be opened by name while still open).
    with NamedTemporaryFile(suffix='.exr') as hdr_f:
        save_hdr(hdr_f.name, hdr_image)
        hdr_f.seek(0)
        hdr_bytes = hdr_f.read()

    data = aiohttp.FormData()
    data.add_field('metadata', json.dumps(metadata),
                   filename='metadata.json',
                   content_type='application/json')
    data.add_field('params', json.dumps(params),
                   filename='params.json',
                   content_type='application/json')
    data.add_field('ldr', array_to_jpg_file(ldr_image),
                   filename='ldr.jpg',
                   content_type='image/jpeg')
    data.add_field('hdr', hdr_bytes,
                   filename='hdr.exr',
                   content_type='image/x-exr')
    data.add_field('segment_map', array_to_png_file(seg_map),
                   filename='segment_map.png',
                   content_type='image/png')
    data.add_field('segment_vis', array_to_png_file(seg_vis),
                   filename='segment_vis.png',
                   content_type='image/png')
    data.add_field('normal_image', array_to_png_file(normal_image),
                   filename='normal_image.png',
                   content_type='image/png')

    async with sess.post(f'{protocol}://{host}:{port}/submit',
                         data=data, auth=auth) as r:
        print(await r.json())
async def request(self, method, url, headers=None, params=None,
                  post_params=None, data=None):
    """Submit a request to the provided URL and data.

    :param str method: The HTTP verb to use.
    :param str url: A URL to query
    :param dict headers: A dictionary with any extra headers to add to the request
    :param dict data: A dictionary with the data to use for the body of the POST
    :param dict params: A dictionary with parameters
    :param dict post_params: A dictionary with post parameters
    :return obj: A requests.Response object received as a response
    """
    # Merge caller-supplied headers over the client defaults.  The original
    # code built the merged dict but then sent only the caller's headers,
    # silently dropping the client defaults.
    master_headers = self.__headers.copy()
    if headers:
        for header in headers:
            master_headers[header] = headers[header]
    headers = master_headers

    args = {
        'method': method,
        'url': url,
        'timeout': self.__timeout,
        'params': params,
        'headers': headers,
        'proxy': os.getenv('http_proxy', os.getenv('HTTP_PROXY', None)),
    }

    if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']:
        # Absent or JSON-ish Content-Type: send the data as-is.
        if 'Content-Type' not in headers or re.search(
                'json', headers['Content-Type'], re.IGNORECASE):
            if data is not None:
                args['data'] = data
        elif headers[
                'Content-Type'] == 'application/x-www-form-urlencoded':  # noqa: E501
            args['data'] = aiohttp.FormData(post_params)
        # Pass a `bytes` parameter directly in the body to support
        # other content types than Json when `body` argument is provided
        # in serialized form
        elif isinstance(data, bytes):
            args['data'] = data
        else:
            # Cannot generate the request from given parameters
            raise InvalidRequest(
                'Cannot prepare a request message for provided arguments. '
                'Please check that your arguments match '
                'declared content type.')

    try:
        result = await self.__session.request(**args)
    except (ClientConnectionError, ClientPayloadError,
            ClientSSLError) as err:
        raise SectigoConnectionError(str(err), None, args['url'])

    await self._raise_for_status(result)
    return result
async def postfiles(self, fields, url, params=None, headers=None, method='POST',
                    ssl=True, timeout=None):
    '''
    Send files from the axon as fields in a multipart/form-data HTTP request.

    Args:
        fields (list): List of dicts containing the fields to add to the request as form-data.
        url (str): The URL to retrieve.
        params (dict): Additional parameters to add to the URL.
        headers (dict): Additional HTTP headers to add in the request.
        method (str): The HTTP method to use.
        ssl (bool): Perform SSL verification.
        timeout (int): The timeout of the request, in seconds.

    Notes:
        The dictionaries in the fields list may contain the following values::

            {
                'name': <str> - Name of the field.
                'sha256': <str> - SHA256 hash of the file to submit for this field.
                'value': <str> - Value for the field. Ignored if a sha256 has been specified.
                'filename': <str> - Optional filename for the field.
                'content_type': <str> - Optional content type for the field.
                'content_transfer_encoding': <str> - Optional content-transfer-encoding header for the field.
            }

        The dictionary returned by this may contain the following values::

            {
                'ok': <boolean> - False if there were exceptions retrieving the URL.
                'err': <str> - An error message if there was an exception when retrieving the URL.
                'url': <str> - The URL retrieved (which could have been redirected)
                'code': <int> - The response code.
                'body': <bytes> - The response body.
                'headers': <dict> - The response headers as a dictionary.
            }

    Returns:
        dict: An information dictionary containing the results of the request.
    '''
    proxyurl = self.conf.get('http:proxy')
    cadir = self.conf.get('tls:ca:dir')

    connector = None
    if proxyurl is not None:
        connector = aiohttp_socks.ProxyConnector.from_url(proxyurl)

    # Resolve the ssl argument: False disables verification, a CA dir
    # builds a custom context, otherwise defer to aiohttp's default.
    if ssl is False:
        pass
    elif cadir:
        ssl = s_common.getSslCtx(cadir)
    else:
        # default aiohttp behavior
        ssl = None

    atimeout = aiohttp.ClientTimeout(total=timeout)

    async with aiohttp.ClientSession(connector=connector,
                                     timeout=atimeout) as sess:
        try:
            data = aiohttp.FormData()
            # Force multipart encoding even if every field is a plain value.
            # NOTE(review): relies on a private aiohttp attribute — verify on
            # aiohttp upgrades.
            data._is_multipart = True

            for field in fields:
                # A sha256 field pulls the file body from the axon; otherwise
                # the literal 'value' is used.
                sha256 = field.get('sha256')
                if sha256:
                    valu = self.get(s_common.uhex(sha256))
                else:
                    valu = field.get('value')

                data.add_field(field.get('name'),
                               valu,
                               content_type=field.get('content_type'),
                               filename=field.get('filename'),
                               content_transfer_encoding=field.get(
                                   'content_transfer_encoding'))

            async with sess.request(method, url, headers=headers,
                                    params=params, data=data,
                                    ssl=ssl) as resp:
                info = {
                    'ok': True,
                    'url': str(resp.url),
                    'code': resp.status,
                    'body': await resp.read(),
                    'headers': dict(resp.headers),
                }
                return info

        except asyncio.CancelledError:  # pragma: no cover
            raise

        except Exception as e:
            logger.exception(
                f'Error POSTing files to [{s_urlhelp.sanitizeUrl(url)}]')
            exc = s_common.excinfo(e)
            mesg = exc.get('errmsg')
            if not mesg:
                mesg = exc.get('err')

            return {
                'ok': False,
                'err': mesg,
                'url': url,
                'body': b'',
                'code': -1,
                'headers': dict(),
            }
async def do_proxy_message(self, conn, member: db.ProxyMember,
                           original_message: discord.Message, text: str,
                           attachment_url: str, has_already_retried=False):
    """Re-post *original_message* through the channel's webhook as *member*,
    delete the original, record the proxied message in the DB, and log it.

    On a 404 from the webhook (stale hook), the DB entry is recreated and the
    call retries itself exactly once (``has_already_retried`` guards the
    recursion).

    Raises:
        DeletionPermissionError: when the bot may not delete the original.
        discord.HTTPException: for other webhook failures.
    """
    hook_id, hook_token = await self.get_webhook_for_channel(
        conn, original_message.channel)

    # Build the webhook payload: username, content, optional file/avatar.
    form_data = aiohttp.FormData()
    form_data.add_field(
        "username", "{} {}".format(member.name, member.tag or "").strip())

    if text:
        form_data.add_field("content", text)

    if attachment_url:
        # Stream the attachment straight through without buffering it.
        attachment_resp = await self.session.get(attachment_url)
        form_data.add_field("file", attachment_resp.content,
                            content_type=attachment_resp.content_type,
                            filename=attachment_resp.url.name)

    if member.avatar_url:
        form_data.add_field("avatar_url", member.avatar_url)

    time_before = time.perf_counter()
    async with self.session.post(
            "https://discordapp.com/api/v6/webhooks/{}/{}?wait=true".
            format(hook_id, hook_token),
            data=form_data) as resp:
        if resp.status == 200:
            message = await resp.json()

            # Report webhook stats to Influx
            await self.stats.report_webhook(
                time.perf_counter() - time_before, True)

            await db.add_message(conn, message["id"], message["channel_id"],
                                 member.id, original_message.author.id,
                                 text or "")

            try:
                await self.client.delete_message(original_message)
            except discord.Forbidden:
                self.logger.warning(
                    "Did not have permission to delete original message (server={}, channel={})"
                    .format(original_message.server.id,
                            original_message.channel.id))
                raise DeletionPermissionError()
            except discord.NotFound:
                self.logger.warning(
                    "Tried to delete message when proxying, but message was already gone (server={}, channel={})"
                    .format(original_message.server.id,
                            original_message.channel.id))

            message_image = None
            if message["attachments"]:
                first_attachment = message["attachments"][0]
                if "width" in first_attachment and "height" in first_attachment:
                    # Only log attachments that are actually images
                    message_image = first_attachment["url"]

            await self.channel_logger.log_message_proxied(
                conn,
                server_id=original_message.server.id,
                channel_name=original_message.channel.name,
                channel_id=original_message.channel.id,
                sender_name=original_message.author.name,
                sender_disc=original_message.author.discriminator,
                member_name=member.name,
                member_hid=member.hid,
                member_avatar_url=member.avatar_url,
                system_name=member.system_name,
                system_hid=member.system_hid,
                message_text=text,
                message_image=message_image,
                message_timestamp=ciso8601.parse_datetime(
                    message["timestamp"]),
                message_id=message["id"])
        elif resp.status == 404 and not has_already_retried:
            # Report webhook stats to Influx
            await self.stats.report_webhook(
                time.perf_counter() - time_before, False)

            # Webhook doesn't exist. Delete it from the DB, create, and add a new one
            self.logger.warning(
                "Webhook registered in DB doesn't exist, deleting hook from DB, re-adding, and trying again (channel={}, hook={})"
                .format(original_message.channel.id, hook_id))
            await db.delete_webhook(conn, original_message.channel.id)
            await self.create_and_add_channel_webhook(
                conn, original_message.channel)

            # Then try again all over, making sure to not retry again and go in a loop should it continually fail
            return await self.do_proxy_message(conn, member,
                                               original_message, text,
                                               attachment_url,
                                               has_already_retried=True)
        else:
            # Report webhook stats to Influx
            await self.stats.report_webhook(
                time.perf_counter() - time_before, False)

            raise discord.HTTPException(resp, await resp.text())
async def upload_associated_file(self, presigned_post: W24PresignedPost,
                                 content: Optional[bytes]) -> None:
    """Upload an associated file (technical drawing or 3D model) to the API.

    The upload targets the presigned POST location, so a fresh session
    without the authentication token is used deliberately. The complete
    message must not exceed 10 MB. A ``None`` payload is silently ignored.

    Arguments:
        presigned_post {W24PresignedPost} -- presigned POST url and fields
            obtained from the websocket.
        content {bytes} -- content of the file as bytes, or None to skip.

    Raises:
        BadRequestException: Raised when the request body cannot be
            interpreted.
        UnauthorizedException: Raised when the token or the requested file
            have expired.
        ResourceNotFoundException: Raised when requesting an endpoint that
            does not exist.
        RequestTooLargeException: Raised when the status code was 413.
        UnsupportedMediaTypException: Raised when the submitted file cannot
            be read.
        ServerException: Raised for all other non-2xx status codes.
    """
    # Nothing to upload for an empty payload.
    if content is None:
        return

    # Merge the presigned fields and the file content into one form.
    payload = aiohttp.FormData()
    for field_name, field_value in presigned_post.fields_.items():
        payload.add_field(field_name, field_value)
    payload.add_field('file', content)

    # Throw-away session: must not carry the authentication token.
    async with aiohttp.ClientSession() as session:
        async with session.post(presigned_post.url, data=payload) as response:
            # Translate the HTTP status into the appropriate exception.
            self._raise_for_status(presigned_post.url, response.status)
async def upload_attachment(self, attachment, peer_id=None):
    """
    Upload specified attachment to VKontakte with specified peer_id and
    return newly uploaded attachment.

    This method doesn't change passed attachments.
    """
    attachment_type = attachment.type

    # Voice messages and graffiti go through the "doc" endpoints with a
    # specialized doctype.
    if attachment_type == "voice":
        attachment_type = "doc"
        doctype = "audio_message"
    elif attachment_type == "graffiti":
        attachment_type = "doc"
        doctype = "graffiti"
    else:
        doctype = "doc"

    if attachment_type == "doc":
        # Pick the message-scoped upload server when a peer is known
        # (graffiti always uses the wall upload server).
        if peer_id and doctype != "graffiti":
            upload_data = await self._request(
                "docs.getMessagesUploadServer",
                {
                    "peer_id": peer_id,
                    "type": doctype
                },
            )
        else:
            upload_data = await self._request(
                "docs.getWallUploadServer",
                {
                    "group_id": self.group_id,
                    "type": doctype
                },
            )

        data = aiohttp.FormData()
        data.add_field(
            "file",
            attachment.file,
            filename=attachment.file_name,
        )
        upload_result = await self._upload_file_to_vk(
            upload_data["upload_url"], data)

        attachment = await self._request("docs.save", upload_result)
        return self._make_attachment(attachment)

    if attachment_type == "image":
        upload_data = await self._request("photos.getMessagesUploadServer",
                                          {"peer_id": peer_id})

        data = aiohttp.FormData()
        data.add_field("photo",
                       attachment.file,
                       filename=attachment.file_name)

        upload_result = await self._upload_file_to_vk(
            upload_data["upload_url"], data)

        try:
            attachments = await self._request("photos.saveMessagesPhoto",
                                              upload_result)
        except RequestException as e:
            # On error code 1, retry once without a peer_id.
            # NOTE(review): meaning of code 1 inferred from this retry —
            # confirm against the VK API error reference.
            if not peer_id or not e.error or e.error["error_code"] != 1:
                raise
            return await self.upload_attachment(attachment, peer_id=None)

        return self._make_attachment({
            "type": "photo",
            "photo": attachments[0],
        })

    raise ValueError(f"Can't upload attachment '{attachment_type}'")
async def upload_file_async(self, to_upload, item_type, item_size, remote_url,
                            uploaded_filename, remote_path=None, callback=None,
                            mode='skip', item_metadata=None):
    """Upload one item asynchronously, streaming *to_upload* with progress
    reporting.

    Args:
        to_upload: buffer/stream with the item content.
        item_type: item type field sent with the form.
        item_size: size in bytes (drives the progress bar).
        remote_url: endpoint path appended to ``self.environment``.
        uploaded_filename: filename used to build the remote path.
        remote_path: remote directory for the item.
        callback: optional ``callback(bytes_read)``; when None and the item
            is larger than 10 MB a tqdm progress bar is used.
        mode: upload mode query parameter (default ``'skip'``).
        item_metadata: optional metadata dict sent as JSON.

    Returns:
        AsyncResponse on completion, or AsyncResponseError on failure.
    """
    # Copy the auth headers — the original assigned ``headers = self.auth``
    # and then mutated it, leaking 'User-Agent' into the shared auth dict.
    headers = dict(self.auth)
    headers['User-Agent'] = requests_toolbelt.user_agent(
        'dtlpy', __version__.version)

    # Default progress reporting: tqdm bar for items larger than 10 MB.
    pbar = None
    if callback is None:
        if item_size > 10e6:
            # size larger than 10MB
            pbar = tqdm.tqdm(total=item_size,
                             unit="B",
                             unit_scale=True,
                             unit_divisor=1024,
                             position=1,
                             disable=self.verbose.disable_progress_bar)

            def callback(bytes_read):
                pbar.update(bytes_read)
        else:

            def callback(bytes_read):
                pass

    timeout = aiohttp.ClientTimeout(total=0)
    async with aiohttp.ClientSession(headers=headers,
                                     timeout=timeout) as session:
        try:
            form = aiohttp.FormData({})
            form.add_field('type', item_type)
            form.add_field(
                'path',
                os.path.join(remote_path, uploaded_filename).replace('\\', '/'))
            if item_metadata is not None:
                form.add_field('metadata', json.dumps(item_metadata))
            form.add_field(
                'file',
                AsyncUploadStream(buffer=to_upload, callback=callback))
            url = '{}?mode={}'.format(self.environment + remote_url, mode)
            async with session.post(url, data=form,
                                    verify_ssl=self.verify) as resp:
                text = await resp.text()
                # Some error responses are not JSON; fall back to an
                # empty dict instead of failing the whole upload.
                try:
                    _json = await resp.json()
                except Exception:
                    _json = dict()
                response = AsyncResponse(text=text,
                                         _json=_json,
                                         async_resp=resp)
        except Exception as err:
            response = AsyncResponseError(error=err,
                                          trace=traceback.format_exc())
        finally:
            if pbar is not None:
                pbar.close()
            with threadLock:
                self.calls_counter.add()
    return response
async def _async_request(self, method: str, url: str,
                         timeout_pair: Tuple[float, float], headers: dict,
                         params: dict, payload: dict, data: dict,
                         files: dict) -> dict:
    """Perform one async HTTP call and return the decoded Feishu response.

    Args:
        method: "GET" or "POST".
        url: target url.
        timeout_pair: (connect_timeout, read_timeout) seconds.
        headers/params: request headers and query parameters.
        payload: JSON body for plain POSTs.
        data: plain form fields for multipart POSTs.
        files: {filename: bytes/str/fileobj} multipart file fields.

    Returns:
        The parsed JSON body (``code`` == 0).

    Raises:
        FeishuError: on unsupported method, connection failure, non-JSON
            responses, or non-zero Feishu error codes.
    """
    if not self.session_async:
        self.session_async = aiohttp.ClientSession()
    if not self.event_loop or self.event_loop.is_closed():
        self.event_loop = _get_or_create_event_loop()
    request_id = secrets.token_hex(4)
    try:
        timeout = aiohttp.ClientTimeout(sock_connect=timeout_pair[0],
                                        sock_read=timeout_pair[1])
        if method == "GET":
            self.logger.debug(
                f"GET url={url} params={params} headers={headers} (id={request_id})"
            )
            resp = await self.session_async.get(url,
                                                params=params,
                                                headers=headers,
                                                timeout=timeout)
        elif method == "POST":
            if data or files:
                # multipart/form-data
                form = aiohttp.FormData()
                for key, value in data.items():
                    form.add_field(key, value)
                for filename, content in files.items():
                    assert not isinstance(
                        content, (tuple, list)), "暂时只支持bytes/str/fileobj等格式的上传数据"
                    # BUGFIX: files must be added to the FormData; the
                    # original called add_field on the plain ``data`` dict.
                    form.add_field(filename, content)
                self.logger.debug(
                    f"POST(form-data) url={url} params={params} "
                    f"data.keys={data.keys()} headers={headers} (id={request_id})"
                )
                # BUGFIX: send the assembled FormData, not the raw dict.
                resp = await self.session_async.post(url,
                                                     params=params,
                                                     data=form,
                                                     headers=headers,
                                                     timeout=timeout)
            else:
                # application/json
                self.logger.debug(
                    f"POST url={url} params={params} json={payload} "
                    f"headers={headers} (id={request_id})")
                resp = await self.session_async.post(url,
                                                     params=params,
                                                     json=payload,
                                                     headers=headers,
                                                     timeout=timeout)
        else:
            raise FeishuError(
                ERRORS.UNSUPPORTED_METHOD, f"不支持的请求method: {method}, 调用上下文: "
                f"url={url}, params={params}, payload={payload} "
                f"data={data} files.keys={files.keys()}")
    except aiohttp.ClientError as e:
        raise FeishuError(ERRORS.FAILED_TO_ESTABLISH_CONNECTION,
                          f"建立和服务器的请求失败: {e}")
    try:
        result = await resp.json(content_type=None)
    except ValueError:
        # BUGFIX: await resp.text() — the original interpolated the bound
        # method object instead of the response body.
        raise FeishuError(ERRORS.UNABLE_TO_PARSE_SERVER_RESPONSE,
                          f"服务器返回格式有问题,无法解析成JSON: {await resp.text()}")
    if result.get("code") != 0:
        raise FeishuError(
            result.get("code") or ERRORS.UNKNOWN_SERVER_ERROR,
            result.get("msg") or f"无有效出错信息,返回JSON数据为: {result}")
    self.logger.debug(f"response={result} (id={request_id})")
    return result
async def request(self, method, url, query_params=None, headers=None,
                  body=None, post_params=None, _preload_content=True,
                  _request_timeout=None):
    """Execute request

    :param method: http request method
    :param url: http request url
    :param query_params: query parameters in the url
    :param headers: http request headers
    :param body: request json body, for `application/json`
    :param post_params: request post parameters,
                        `application/x-www-form-urlencoded`
                        and `multipart/form-data`
    :param _preload_content: this is a non-applicable field for
                             the AiohttpClient.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    """
    method = method.upper()
    assert method in [
        'GET', 'HEAD', 'DELETE', 'POST', 'PUT', 'PATCH', 'OPTIONS'
    ]

    if post_params and body:
        raise ApiValueError(
            "body parameter cannot be used with post_params parameter.")

    post_params = post_params or {}
    headers = headers or {}
    # Default timeout: 5 minutes.
    timeout = _request_timeout or 5 * 60

    if 'Content-Type' not in headers:
        headers['Content-Type'] = 'application/json'

    args = {
        "method": method,
        "url": url,
        "timeout": timeout,
        "headers": headers
    }

    if query_params:
        args["url"] += '?' + urlencode(query_params)

    # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE`
    if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']:
        if re.search('json', headers['Content-Type'], re.IGNORECASE):
            if body is not None:
                body = json.dumps(body)
            args["data"] = body
        elif headers[
                'Content-Type'] == 'application/x-www-form-urlencoded':  # noqa: E501
            args["data"] = aiohttp.FormData(post_params)
        elif headers['Content-Type'] == 'multipart/form-data':
            # must del headers['Content-Type'], or the correct
            # Content-Type which generated by aiohttp
            del headers['Content-Type']
            data = aiohttp.FormData()
            for param in post_params:
                k, v = param
                # (name, content, content_type) triples become file fields.
                if isinstance(v, tuple) and len(v) == 3:
                    data.add_field(k,
                                   value=v[1],
                                   filename=v[0],
                                   content_type=v[2])
                else:
                    data.add_field(k, v)
            args["data"] = data
        # Pass a `bytes` parameter directly in the body to support
        # other content types than Json when `body` argument is provided
        # in serialized form
        elif isinstance(body, bytes):
            args["data"] = body
        else:
            # Cannot generate the request from given parameters
            msg = """Cannot prepare a request message for provided arguments. Please check that your arguments match declared content type."""
            raise ApiException(status=0, reason=msg)

    r = await self.pool_manager.request(**args)
    if _preload_content:
        data = await r.text()
        r = RESTResponse(r, data)

        # log response body
        logger.debug("response body: %s", r.data)

    if not 200 <= r.status <= 299:
        raise ApiException(http_resp=r)

    return r
async def test_upload_notauth(wui_client: aiohttp.ClientSession):
    """An unauthenticated multipart upload must be rejected with HTTP 401."""
    form = aiohttp.FormData()
    form.add_field('file', b'', filename='empty.gcode')
    resp = await wui_client.post('/api/files/sdcard', data=form)
    assert resp.status == 401
async def request(self, verb, url, payload=None, multipart=None, *, files=None):
    """Perform an HTTP request with up to 5 attempts, honoring Discord-style
    rate-limit headers.

    Either *payload* (sent as JSON) or *multipart* (sent as form-data, where
    keys starting with 'file' carry (filename, fp, content_type) tuples) may
    be given.  Retries on 429 (using retry_after) and on 500/502; raises
    Forbidden / NotFound / HTTPException on other failures.
    """
    headers = {}
    data = None
    files = files or []
    if payload:
        headers['Content-Type'] = 'application/json'
        data = utils.to_json(payload)

    if multipart:
        data = aiohttp.FormData()
        for key, value in multipart.items():
            if key.startswith('file'):
                data.add_field(key, value[1], filename=value[0],
                               content_type=value[2])
            else:
                data.add_field(key, value)

    for tries in range(5):
        # Rewind file objects so a retry re-sends them from the start.
        for file in files:
            file.reset(seek=tries)

        async with self.session.request(verb, url, headers=headers,
                                        data=data) as r:
            response = await r.text(encoding='utf-8')
            if r.headers['Content-Type'] == 'application/json':
                response = json.loads(response)

            # check if we have rate limit header information
            remaining = r.headers.get('X-Ratelimit-Remaining')
            if remaining == '0' and r.status != 429:
                # Bucket exhausted: sleep pre-emptively before continuing.
                delta = utils._parse_ratelimit_header(r)
                await asyncio.sleep(delta)

            if 300 > r.status >= 200:
                return response

            # we are being rate limited
            if r.status == 429:
                # retry_after is in milliseconds.
                retry_after = response['retry_after'] / 1000.0
                await asyncio.sleep(retry_after)
                continue

            if r.status in (500, 502):
                await asyncio.sleep(1 + tries * 2)
                continue

            if r.status == 403:
                raise Forbidden(r, response)
            elif r.status == 404:
                raise NotFound(r, response)
            else:
                raise HTTPException(r, response)

    # no more retries
    raise HTTPException(r, response)
async def upload(self):
    """Run the three-step VK media upload: request an upload URL from
    ``self.endpoint``, POST the downloaded media file to it, then save the
    result via ``self.endpoint1``.

    Returns the resulting media id (also stored in ``self.media_id``), or
    None when the media download or the upload response fails.

    Raises:
        Exception: when no session has been established.
        VKException: when any VK response carries an 'error' key.
    """
    if not self.session:
        raise Exception('Connection not established')
    self.logger.debug('Uploading %r', self)

    # Fetch the source media locally, retrying once on connection errors.
    try:
        filename = await self._download_media()
    except ConnectionError as e:
        self.logger.warning(
            'Got connection error on download_media: "%s", retrying...', e)
        try:
            filename = await self._download_media()
        except ConnectionError:
            self.logger.exception('Failed to download_media')
            return None

    # Step 1: ask VK for an upload URL.
    postdata = self._build_postdata()
    self.logger.debug('POST %s %s', self.endpoint, postdata)
    async with self.session.post(self.endpoint, data=postdata) as resp:
        resp = await resp.json(loads=ujson.loads)
    self.logger.log(logging.DEBUG - 5, 'POST %s response: %s',
                    self.endpoint, resp)
    if resp.get('error'):
        raise VKException(resp)
    upload_url = resp.get('response', {}).get('upload_url')

    # Step 2: POST the file itself to the upload URL as multipart form-data.
    with open(filename, 'rb') as f:
        formdata = aiohttp.FormData()
        formdata.add_field(self._field_name,
                           f,
                           filename=self.original_filename
                           or os.path.basename(filename),
                           content_type=self.mimetype)
        self.logger.debug('POST %s %s', upload_url, formdata)
        async with self.session.post(upload_url, data=formdata) as resp:
            try:
                # content_type=None tolerates non-JSON content-type headers.
                resp = await resp.json(content_type=None, loads=ujson.loads)
                self.logger.log(logging.DEBUG - 5, 'POST %s response: %s',
                                upload_url, resp)
            except aiohttp.client_exceptions.ContentTypeError:
                self.logger.error('Invalid response: %s', await resp.text())
                return None
    if resp.get('error'):
        raise VKException(resp)

    # Step 3: save the uploaded media, then clean up the local temp file.
    save_ticket = self._build_postdata1(resp)
    try:
        os.unlink(filename)
    except OSError:
        pass
    self.logger.debug('POST %s %s', self.endpoint1, save_ticket)
    async with self.session.post(self.endpoint1, data=save_ticket) as resp:
        resp = await resp.json(loads=ujson.loads)
    self.logger.log(logging.DEBUG - 5, 'POST %s response: %s',
                    self.endpoint1, resp)
    if resp.get('error'):
        raise VKException(resp)
    self.media_id = self._get_id(resp)
    return self.media_id