Example #1
    async def close(self, method=None, timeout=None):
        if method is None:
            if not self._closed:
                if self._state == CallState.Terminated:
                    return await self.bye(timeout=timeout)
                elif self._state != CallState.Completed:
                    return await self.cancel(timeout=timeout)
            return

        self._closed = True
        msg = self._prepare_request(method)
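        # Reuse the Authorization header from the original request, refreshing
        # the digest for the new method when the credential object supports it.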
        authorization = self.original_msg.headers.get('Authorization')
        if authorization and not msg.headers.get('Authorization'):
            if hasattr(authorization, 'next'):
                authorization.next(method=msg.method)
            msg.headers['Authorization'] = authorization
            msg.to_details = msg.to_details.clone()
            msg.contact_details = None
            msg.headers['Via'] = self.original_msg.headers['Via']
        transaction = UnreliableTransaction(self,
                                            original_msg=msg,
                                            loop=self.app.loop)
        self.transactions[msg.method][msg.cseq] = transaction

        try:
            async with Timeout(timeout):
                await transaction.start(
                    skip_wait=self._state == CallState.Calling)
        finally:
            self._close()
Example #2
    async def sget(self, url, *, buffer=False, json=None, timeout=2, max_size=3*(10**6), trust_host=False):
        """
        An async function that simplifies the logic of a get method
        """
        try:
            async with Timeout(timeout, loop=self.loop):
                async with self.session.get(url) as r:
                    try:
                        if int(r.headers["Content-Length"]) > max_size:
                            raise SgetError(f"Tentative de téléchargement d'un fichier trop gros.")
                    except KeyError:
                        # Sometimes, Content-Lenght header will be missing.
                        # If we trust the host, we can ignore the max_size...
                        if not trust_host:
                            raise SgetError("Taille du fichier à téléchargement inconue.")

                    if r.status == 200:
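                        # `json` selects a single key from the decoded JSON body,
                        # `buffer` wraps the raw bytes in a BytesIO.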
                        if json:
                            j = await r.json()
                            return j[json]

                        elif buffer:
                            data = await r.read()
                            return io.BytesIO(data)
                        else:
                            return await r.read()
                    else:
                        raise SgetError(f"Le téléchargement n'a pas abouti (status HTTP: {r.status}).")
        except asyncio.TimeoutError as e:
            raise SgetError("Le téléchargement à pris trop de temps.") from e
Example #3
    async def do_ascii_cmd(self, ctx, objet, cmap, *, is_big=False):
        objet = objet or str(ctx.author.avatar_url_as(format="png", size=256))

        async with ctx.loading() as load:
            await load.update("Téléchargement de l'image...")
            objet = await self.bot.sget(objet)

            await load.update("Traitement...")
            time_ = time.perf_counter()
            out = await self.bot.in_thread(self.process_ascii, objet, cmap,
                                           is_big)
            time_ = time.perf_counter() - time_

            if is_big:
                await load.update("Envoi...")
                url = "https://wastebin.travitia.xyz/"
                try:
                    async with Timeout(3, loop=self.bot.loop):
                        async with self.bot.session.post(
                                url=f"{url}documents",
                                data=out.encode("utf-8")) as p:
                            if p.status == 200:
                                res = await p.json()
                                bin_url = url + res["key"]
                            else:
                                raise asyncio.TimeoutError
                except asyncio.TimeoutError:
                    # Failed..., so we try the others
                    bin_url = await self.bot.safe_bin_post(out)

                await ctx.send(f"*En {round(time_ * 1000, 3)}ms:*\n{bin_url}")
            else:
                await ctx.send(f"*En {round(time_ * 1000, 3)}ms:*")
                await ctx.send(f"```\n{out}\n```")
Example #4
	async def proxy_request(self, method:str, url:str, **kwargs):
		json = kwargs.pop('json', False)
		text = kwargs.pop('text', False)
		b = kwargs.pop('b', False)
		rheaders = kwargs.pop('rheaders', False)
		# try 2 times in case of a bad proxy
		retries = kwargs.pop('max_retries', 2)
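		# each attempt fetches a proxy via get_proxy() and retries on failure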
		for i in range(retries):
			try:
				async with Timeout(kwargs.pop('timeout', 10)):
					ba = aiohttp.BasicAuth('user2016958', 'GDJCFP23')
					proxy = f"http://{self.get_proxy()}:6060"
					async with getattr(self.session, method)(url, proxy=proxy, proxy_auth=ba, **kwargs) as resp:
						# assert resp.status == 200
						if json:
							r = await resp.json(content_type=None)
						elif text:
							r = await resp.text()
						elif b:
							r = BytesIO(await resp.read())
							r.seek(0)
						else:
							r = resp
						return (r, resp.headers) if rheaders else r
			except Exception as e:
				if self.bot.dev_mode:
					print(e)
					raise e
				elif i < (retries - 1):
					continue
				return False
Example #5
	async def run_process(self, args, response=False, b=False, stdin=None, shell=False):
		func = asyncio.create_subprocess_exec
		if shell:
			func = asyncio.create_subprocess_shell
			args = (' '.join(args),)
		inp = stdin.read() if isinstance(stdin, BytesIO) else stdin
		stdin = stdin or asyncio.subprocess.PIPE
		try:
			async with Timeout(120):
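				# launch the child process and give the whole run a hard two-minute cap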
				proc = await func(
					*args, stdin=stdin, stderr=asyncio.subprocess.PIPE,
					stdout=asyncio.subprocess.PIPE, loop=self.loop
				)
				data, _ = await proc.communicate(input=inp)
		except asyncio.TimeoutError:
			proc.terminate()
			# let it cleanup
			await asyncio.sleep(5)
			if proc.returncode is None: # force kill if it still has not exited
				proc.kill()
			assert False, 'Processing timeout exceeded.'
		if b:
			b = BytesIO(data)
			return b.read()
		elif response:
			try:
				decoded = data.decode('ascii')
			except UnicodeDecodeError:
				decoded = data.decode('utf-8')
			return decoded.rstrip()
		return True
Example #6
	async def get_cookies(self, url:str, **kwargs):
		try:
			async with Timeout(kwargs.pop('timeout', 5)):
				async with self.session.get(url, **kwargs) as r:
					return r.cookies
		except:
			return False
Example #7
	async def get_json(self, url:str, timeout:int=5, headers=None, data=None, content_type='application/json'):
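		# POST when a request body is supplied, otherwise GET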
		method = self.session.post if data else self.session.get
		try:
			async with Timeout(timeout):
				async with method(url, headers=headers, data=data) as r:
					r.raise_for_status()
					load = await r.json(content_type=content_type)
					return load
		except:
			return {}
Example #8
 async def request(self,
                   method,
                   contact_details=None,
                   headers=None,
                   payload=None,
                   timeout=None):
     msg = self._prepare_request(method, contact_details, headers, payload)
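     # ACK requests receive no response, so they are sent directly
     # instead of starting a transaction and awaiting a reply.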
     if msg.method != 'ACK':
         async with Timeout(timeout):
             return await self.start_unreliable_transaction(msg)
     else:
         self.peer.send_message(msg)
Example #9
 async def safe_bin_post(self, content):
     data = content.encode("utf-8")
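     # Try each configured paste service in turn, giving each two seconds.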
     for url in BINS:
         try:
             async with Timeout(2, loop=self.loop):
                 async with self.session.post(url=f"{url}documents", data=data) as p:
                     if p.status == 200:
                         res = await p.json()
                         return url + res["key"]
                     else:
                         continue
         except asyncio.TimeoutError:
             pass
     return None
Example #10
	async def post_data(self, url:str, data=None, **kwargs):
		t = kwargs.pop('timeout', 5)
		headers = kwargs.pop('headers', self.default_headers)
		try:
			async with Timeout(t):
				async with self.session.post(url, headers=headers, data=data) as resp:
					r = True
					if kwargs.pop('read', False):
						r = await resp.read(), resp.status
					elif kwargs.pop('json', False):
						r = await resp.json()
					elif kwargs.pop('text', False):
						r = await resp.text(encoding='utf-8')
					return (r, resp.headers) if kwargs.get('rheaders') else r
		except Exception as e:
			if self.bot.dev_mode:
				print(e)
				raise e
			return False
Example #11
	async def get_text(self, url:str, **kwargs):
		p = kwargs.pop('proxy', None)
		if p:
			p = self.proxy
		dlimit = kwargs.pop("discord_limit", False)
		try:
			async with Timeout(kwargs.pop('timeout', 5)):
				async with self.session.get(url, proxy=p, **kwargs) as r:
					if dlimit:
						# Max 6 bytes per character (emojis)
						# 2000 discord char limit
						chunk = await r.content.read(6 * 2000)
						if not chunk:
							return False
						return chunk.decode("utf-8")

					return await r.text()
		except:
			return False
Example #12
	async def _bytes_download(self, url:str, timeout:int=10, headers=None, **kwargs):
		p = kwargs.pop('proxy', None)
		if p:
			p = self.proxy

		limit = kwargs.pop('limit', None)
		async with Timeout(timeout):
			async with self.session.get(url, headers=headers, proxy=p, **kwargs) as r:
				r.raise_for_status()
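				# when a size limit is given, check the Content-Length header
				# before reading the body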
				if limit:
					cl = int(r.headers.get('content-length', 0))
					if cl > limit:
						raise LimitError(f"Content exceeds size limit (> {cl} B)")
					chunk = await r.content.readexactly(cl)
					if not chunk:
						return False
					b = BytesIO(chunk)
				else:
					b = BytesIO(await r.read())
				b.seek(0)
				return b
Example #13
 async def youtube_scrap(self, search: str, safety=False):
     try:
         headers = {
             'User-Agent':
             'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:43.0) Gecko/20100101 Firefox/43.0'
         }
         search = quote(search)
         api = 'https://www.youtube.com/results?search_query={0}'.format(
             search)
         cookies = {
             'PREF':
             'cvdm=grid&al=en&f4=4000000&f5=30&f1=50000000&f2=8000000'
         } if safety else None
         async with aiohttp.ClientSession(cookies=cookies) as session:
             with Timeout(5):
                 async with session.get(api, headers=headers) as r:
                     assert r.status == 200
                     txt = await r.text()
         root = etree.fromstring(txt, etree.HTMLParser(collect_ids=False))
         search_nodes = root.findall(
             ".//ol[@class='section-list']/li/ol[@class='item-section']/li")
         if not search_nodes:
             return False
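         # Skip the first node and take the first entry that yields a title and link.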
         search_nodes.pop(0)
         result = False
         for node in search_nodes:
             if result:
                 break
             try:
                 url_node = node.find('div/div/div/h3/a')
                 if url_node is None:
                     continue
                 title = self.get_deep_text(url_node)
                 url = f"https://www.youtube.com/{url_node.attrib['href']}"
                 result = (title, url)
             except:
                 continue
         return result
     except:
         return False
Example #14
    async def close(self, timeout=None):
        if not self._closed:
            self._closed = True

            msg = None
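            # An established call is ended with BYE; a call still being
            # set up is aborted with CANCEL.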
            if self._state == CallState.Terminated:
                msg = self._prepare_request('BYE')
            elif self._state != CallState.Completed:
                msg = self._prepare_request('CANCEL')

            if msg:
                transaction = UnreliableTransaction(self,
                                                    original_msg=msg,
                                                    loop=self.app.loop)
                self.transactions[msg.method][msg.cseq] = transaction

                try:
                    async with Timeout(timeout):
                        await transaction.start()
                finally:
                    self._close()

        self._close()
Example #15
	async def get_mime(self, url):
		async with Timeout(10):
			async with self.session.head(url, proxy=self.proxy, allow_redirects=True) as r:
				r.raise_for_status()
				return r.headers.get('Content-Type', '').lower().split(';')[0]
Example #16
 def request(self, command: Command, payload: dict, timeout=None):
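     # Old-style (pre async/await) coroutine: connect, send the serialized
     # command, and wait for the matching response within the timeout.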
     with Timeout(timeout or self.timeout, loop=self.loop):
         yield from self.connection.connect()
         self.connection.write(self.__create_request(command, payload))
         yield from self.connection.drain()
         return (yield from self.__get_response())