async def coroutine():
    """Perform one API call: serialise args, upload any attached files
    as multipart form-data, and unwrap the server's {ok, result} envelope.

    NOTE(review): this is a closure — ``self``, ``method`` and ``kwargs``
    are free variables supplied by the enclosing scope; the mangled
    ``self.__session`` access implies it was defined inside a class body.

    :raises RequestError: when the server reports an error or the
        response lacks the expected envelope.
    """

    def extract_files(obj):
        # Walk `obj` recursively, pulling file-like values out into `fs`
        # and replacing them with 'attach://<name>' placeholders
        # (presumably the Telegram Bot API attachment convention — confirm).
        fs, idx = {}, 0

        def rec(v):
            if isinstance(v, list):
                return [rec(x) for x in v]
            if isinstance(v, dict):
                # comprehension has its own scope: inner `v` shadows outer
                return {k: rec(v) for k, v in v.items()}
            # declared mid-function (after the early returns) — legal
            # because fs/idx are not bound before this statement
            nonlocal fs, idx
            # (name, file) pair: caller chose the attachment name
            if isinstance(v, tuple) and len(v) == 2 and isinstance(
                    v[1], IOBase):
                name = str(v[0])
                fs[name] = v[1]
                return 'attach://' + name
            # bare file object: generate a sequential name
            if isinstance(v, IOBase):
                name = 'file_' + str(idx)
                idx += 1
                fs[name] = v
                return 'attach://' + name
            return v

        return rec(obj), fs

    args, files = extract_files(kwargs)
    # methods listed in trace_methods are deliberately NOT logged
    log = method not in self.trace_methods
    if log:
        logger.debug('calling method %s %s', method, args)
    if files:
        # files present: scalar/structured args travel in the query string
        # (JSON-encoded for lists/dicts), files in the multipart body
        query = urlencode({
            k: json.dumps(v) if isinstance(v, (list, dict)) else str(v)
            for k, v in args.items()
        })
        writer = MultipartWriter('form-data')
        for n, file in files.items():
            writer.append(file).set_content_disposition('attachment', filename=n)
        response = await self.__session.post(self.__url + method + '?' + query, data=writer)
    else:
        # no files: plain JSON body
        response = await self.__session.post(self.__url + method, json=args)
    data = DynamicDictObject(await response.json())
    if log:
        logger.debug('received answer %s', data)
    if 'ok' in data:
        if data.ok and 'result' in data:
            return data.result
        if 'description' in data:
            raise RequestError('server error calling \'%s\': %s' % (method, data.description))
    raise RequestError('bad response: %s' % data)
async def upload_file(url, username, password, file_path, responses):
    """POST `file_path`'s contents (with credentials) to `url` and tally
    the outcome into the shared `responses` counter dict.

    :param url: Endpoint to POST to.
    :param username: Credential sent as a form field.
    :param password: Credential sent as a form field.
    :param file_path: Path of the file whose bytes are uploaded.
    :param responses: Mutable dict mapping response-body text (or an
        error tag) to an occurrence count; updated in place.
    """
    async with aiofiles.open(file_path, mode='rb') as f:
        file_contents = await f.read()
        # BUG FIX: removed a MultipartWriter (`mpw`/`part`) that was built
        # here but never passed to session.post — dead code; the request
        # below actually sends the plain form_data dict.
        form_data = {
            'username': username,
            'password': password,
            'file': file_contents
        }
        async with ClientSession() as session:
            try:
                async with session.post(url, data=form_data) as response:
                    body = await response.read()
                    response_str = body.decode('utf-8')
                    responses[response_str] = responses.get(response_str, 0) + 1
            except ClientConnectionError:
                responses['CONNECTION_ERR'] = responses.get('CONNECTION_ERR', 0) + 1
            except ClientPayloadError:
                responses['PAYLOAD_ERR'] = responses.get('PAYLOAD_ERR', 0) + 1
            except ClientResponseError:
                responses['RESPONSE_ERR'] = responses.get('RESPONSE_ERR', 0) + 1
async def upload_file(url, username, password, file_path, runtime):
    """POST `file_path`'s contents as a multipart FormData upload to `url`
    and tally the outcome into `runtime['responses']`.

    :param url: Endpoint to POST to.
    :param username: Credential sent as a form field.
    :param password: Credential sent as a form field.
    :param file_path: Path of the file whose bytes are uploaded.
    :param runtime: Shared state dict; its 'responses' entry maps
        (truncated) response-body text or an error tag to a count.
    """
    async with aiofiles.open(file_path, mode='rb') as f:
        file_contents = await f.read()
        responses = runtime['responses']
        # BUG FIX: removed a MultipartWriter (`mpw`/`part`) that was built
        # here but never used — the FormData below is what gets sent.
        fd = FormData()
        fd.add_field('file', file_contents, filename=path.basename(file_path))
        fd.add_field('username', username)
        fd.add_field('password', password)
        async with ClientSession() as session:
            try:
                async with session.post(url, data=fd) as response:
                    body = await response.read()
                    response_str = body.decode('utf-8')
                    # keep counter keys short: truncate long response bodies
                    if len(response_str) > 15:
                        response_str = response_str[:12] + "..."
                    responses[response_str] = responses.get(response_str, 0) + 1
            except ClientConnectionError:
                responses['CONNECTION_ERR'] = responses.get('CONNECTION_ERR', 0) + 1
            except ClientPayloadError:
                responses['PAYLOAD_ERR'] = responses.get('PAYLOAD_ERR', 0) + 1
            except ClientResponseError:
                responses['RESPONSE_ERR'] = responses.get('RESPONSE_ERR', 0) + 1
async def _upload_data(self, media_id: str, data: bytes,
                       chunk_size: int = 2**17) -> MediaUploadResponse:
    """Upload raw media bytes in fixed-size segments, then FINALIZE.

    :param media_id: Server-assigned media identifier to append to.
    :param data: Full payload to upload.
    :param chunk_size: Bytes per APPEND segment. Generalized from the
        previously hard-coded ``2**17`` (128 KiB); the default preserves
        the old behavior.
    :return: The deserialized finalize (or post-processing) response.
    """
    base_upload_req = self.upload_url.with_query({
        "command": "APPEND",
        "media_id": media_id,
    })
    # Send the payload one segment at a time; segment_index orders them.
    for i in range(math.ceil(len(data) / chunk_size)):
        multipart_data = MultipartWriter("form-data")
        part = multipart_data.append(data[i * chunk_size:(i + 1) * chunk_size])
        part.set_content_disposition("form-data", name="media", filename="blob")
        req = base_upload_req.update_query({"segment_index": i})
        async with self.http.post(req, data=multipart_data, headers=self.headers) as resp:
            await check_error(resp)
        self.log.debug(f"Uploaded segment {i} of {media_id}")
    finalize_req = self.upload_url.with_query({
        "command": "FINALIZE",
        "media_id": media_id,
    })
    async with self.http.post(finalize_req, headers=self.headers) as resp:
        resp_data = await check_error(resp)
    # The server may still be processing the media; if so, poll for it.
    processing_info = resp_data.get("processing_info", {})
    if processing_info.get("state", None) == "pending":
        self.log.debug(
            f"Finished uploading {media_id}, but server is still processing it"
        )
        check_after = processing_info.get("check_after_secs", 1)
        return await self._wait_processing(media_id, check_after)
    self.log.debug(f"Finished uploading {media_id}")
    return MediaUploadResponse.deserialize(resp_data)
async def create_thread_in_forum(
    self,
    channel_id: int,
    name: str,
    auto_archive_duration: int,
    message_payload: dict,
    applied_tags: List[str] = None,
    files: Optional[List[File]] = MISSING,
    rate_limit_per_user: Optional[int] = None,
    reason: Optional[str] = None,
) -> dict:
    """
    From a given Forum channel, create a Thread with a message to start with.

    :param channel_id: The ID of the channel to create this thread in
    :param name: The name of the thread
    :param auto_archive_duration: duration in minutes to automatically archive the thread after recent activity,
        can be set to: 60, 1440, 4320, 10080
    :param message_payload: The payload/dictionary contents of the first message in the forum thread.
    :param applied_tags: An optional list of tag IDs applied to the thread.
    :param files: An optional list of files to send attached to the message.
    :param rate_limit_per_user: Seconds a user has to wait before sending another message (0 to 21600), if given.
    :param reason: An optional reason for the audit log
    :return: Returns a Thread in a Forum object with a starting Message.
    """
    query = {
        "has_message": "True"
    }  # TODO: Switch query after new feature breaking release.

    payload = {"name": name, "auto_archive_duration": auto_archive_duration}
    if rate_limit_per_user:
        payload["rate_limit_per_user"] = rate_limit_per_user
    if applied_tags:
        payload["applied_tags"] = applied_tags

    # BUG FIX: merge the starter message *before* serialising payload_json.
    # Previously this merge happened only on the no-files path, so the first
    # message's contents were silently dropped whenever files were attached.
    payload.update(message_payload)

    data = None
    # `files` is Optional, so None must be tolerated; a plain truthiness
    # test avoids the TypeError the old `len(files) > 0` raised for None.
    if files is not MISSING and files:
        data = MultipartWriter("form-data")
        # the JSON body travels as the `payload_json` multipart part
        part = data.append_json(payload)
        part.set_content_disposition("form-data", name="payload_json")
        payload = None  # body is multipart now; don't also send JSON
        for id, file in enumerate(files):
            part = data.append(file._fp)
            part.set_content_disposition(
                "form-data", name=f"files[{str(id)}]", filename=file._filename
            )

    return await self._req.request(
        Route("POST", f"/channels/{channel_id}/threads?has_message=True"),
        json=payload,
        data=data,
        params=query,
        reason=reason,
    )
def payload_params_to_multipart(params: Params["Payload"]) -> "MultipartWriter":
    """Build a form-data MultipartWriter from payload params.

    Each entry becomes one part carrying the payload's raw data, its
    JSON-encoded meta in the payload-meta header, and a form-data
    Content-Disposition named after the param key.
    """
    import json

    from aiohttp.multipart import MultipartWriter
    from multidict import CIMultiDict

    writer = MultipartWriter(subtype="form-data")
    for field_name, item in params.items():
        part_headers = CIMultiDict([
            (PAYLOAD_META_HEADER, json.dumps(item.meta)),
            ("Content-Disposition", f'form-data; name="{field_name}"'),
        ])
        writer.append(item.data, headers=part_headers)
    return writer
def _encode_file(self, filename): """Encode file into multipart data. :param filename: Full path to the javascript file or zip bundle. :type filename: str | unicode :return: Multipart encoder object :rtype: aiohttp.MultipartWriter """ extension = os.path.splitext(filename)[1] if extension == '.js': # pragma: no cover source_type = 'application/javascript' elif extension == '.zip': source_type = 'application/zip' else: raise ValueError('File extension must be .zip or .js') mpwriter = MultipartWriter() mpwriter.append( open(filename, 'rb').read(), {'CONTENT-TYPE': source_type}) return mpwriter
def _create_task(path, loop, semaphore, session):
    # Schedule an upload of `path` as a form-data part named "file".
    # NOTE(review): closure — `content_type` and `_upload` are free
    # variables supplied by the enclosing scope; CONTENT_TYPE /
    # CONTENT_DISPOSITION are presumably aiohttp header constants.
    default = "application/octet-stream"
    # explicit content_type wins; otherwise guess from the file name
    mime_type = (content_type or mimetypes.guess_type(str(path))[0] or default)
    # FIXME Nexus seems to not parse the Content-Disposition 'filename*' field properly.
    # data = FormData()
    # data.add_field("file", path.open("rb"), content_type=mime_type, filename=path.name)
    # FIXME This hack is to prevent sending Content-Disposition with the 'filename*' field.
    data = MultipartWriter("form-data")
    # the open file object is streamed by aiohttp; header is set manually
    # so only the plain 'filename' field (no 'filename*') is emitted
    part = data.append(path.open("rb"))
    part.headers[CONTENT_TYPE] = mime_type
    part.headers[CONTENT_DISPOSITION] = f'form-data; name="file"; filename="{path.name}"'
    return loop.create_task(_upload(data, semaphore, session))
async def edit_message(
    self, channel_id: int, message_id: int, payload: dict, files: Optional[List[File]] = MISSING
) -> dict:
    """
    Edits a message that already exists.

    :param channel_id: Channel snowflake ID.
    :param message_id: Message snowflake ID.
    :param payload: Any new data that needs to be changed.
    :param files: An optional list of files to send attached to the message.
    :type payload: dict
    :return: A message object with edited attributes.
    """
    data = None
    # BUG FIX: `files` is Optional, so None must be tolerated; the old
    # `len(files) > 0` check raised TypeError for files=None. A plain
    # truthiness test covers None and the empty list alike.
    if files is not MISSING and files:
        data = MultipartWriter("form-data")
        # the JSON body travels as the `payload_json` multipart part
        part = data.append_json(payload)
        part.set_content_disposition("form-data", name="payload_json")
        payload = None  # body is multipart now; don't also send JSON
        for id, file in enumerate(files):
            part = data.append(file._fp)
            part.set_content_disposition(
                "form-data", name="files[" + str(id) + "]", filename=file._filename
            )

    return await self._req.request(
        Route(
            "PATCH",
            "/channels/{channel_id}/messages/{message_id}",
            channel_id=channel_id,
            message_id=message_id,
        ),
        json=payload,
        data=data,
    )
async def create_message(
    self, payload: dict, channel_id: int, files: Optional[List[File]] = MISSING
) -> dict:
    """
    Send a message to the specified channel.

    :param payload: Dictionary contents of a message. (i.e. message payload)
    :param channel_id: Channel snowflake ID.
    :param files: An optional list of files to send attached to the message.
    :return dict: Dictionary representing a message (?)
    """
    data = None
    # BUG FIX: `files` is Optional, so None must be tolerated; the old
    # `len(files) > 0` check raised TypeError for files=None. A plain
    # truthiness test covers None and the empty list alike.
    if files is not MISSING and files:
        data = MultipartWriter("form-data")
        # the JSON body travels as the `payload_json` multipart part
        part = data.append_json(payload)
        part.set_content_disposition("form-data", name="payload_json")
        payload = None  # body is multipart now; don't also send JSON
        for id, file in enumerate(files):
            part = data.append(file._fp)
            part.set_content_disposition(
                "form-data", name="files[" + str(id) + "]", filename=file._filename
            )

    request = await self._req.request(
        Route("POST", "/channels/{channel_id}/messages", channel_id=channel_id),
        json=payload,
        data=data,
    )
    if request.get("id"):
        # cache the freshly created message by its snowflake
        self.cache.messages.add(Item(id=request["id"], value=Message(**request)))
    return request
async def _process_multipart(reader: MultipartReader, writer: MultipartWriter) -> None:
    """Process multipart.

    Recursively walks the response multipart, re-appending each part to
    the outgoing writer with handling keyed on the part's Content-Type.

    Args:
        reader (MultipartReader): Response multipart to process.
        writer (MultipartWriter): Multipart to write out.
    """
    while True:
        part = await reader.next()  # noqa: B305
        # https://github.com/PyCQA/flake8-bugbear/issues/59
        if part is None:
            break
        if isinstance(part, MultipartReader):
            # nested multipart: descend and flatten into the same writer
            await _process_multipart(part, writer)
            continue
        # BUG FIX: previously this checked part.headers.get("hdrs.CONTENT_TYPE")
        # — a literal string, never a real header name — so the whole
        # content-type dispatch below was unreachable.
        content_type = part.headers.get(hdrs.CONTENT_TYPE)
        if content_type:
            if content_type == "application/json":
                part_data: Optional[Union[Text, Dict[Text, Any],
                                          List[Tuple[Text, Text]],
                                          bytes]] = await part.json()
                writer.append_json(part_data)
            elif content_type.startswith("text"):
                part_data = await part.text()
                writer.append(part_data)
            elif content_type == "application/www-urlform-encode":
                # NOTE(review): nonstandard MIME string — the usual value is
                # "application/x-www-form-urlencoded"; confirm what the
                # sender actually emits before changing this literal.
                part_data = await part.form()
                writer.append_form(part_data)
            else:
                part_data = await part.read()
                writer.append(part_data)
        else:
            part_data = await part.read()
            if part.name:
                # NOTE(review): `self` is not a parameter of this function —
                # this branch raises NameError if reached; it looks extracted
                # from a method. Fixing it needs the owning object passed in
                # (an interface change), so it is only flagged here. Also
                # note nothing is appended to `writer` on this path.
                self.data.update({part.name: part_data})
            elif part.filename:
                # BUG FIX: dropped the redundant second `await part.read()` —
                # the payload was already consumed above, so re-reading
                # yielded b"" and stored/forwarded empty data.
                self.data.update({part.filename: part_data})
                writer.append(part_data)