def parse(self, stream, media_type=None, parser_context=None):
    """
    Parses the incoming bytestream as a multipart encoded form,
    and returns a DataAndFiles object.

    `.data` will be a `QueryDict` containing all the form parameters.
    `.files` will be a `QueryDict` containing all the form files.
    """
    parser_context = parser_context or {}
    request = parser_context["request"]
    encoding = parser_context.get("encoding", settings.DEFAULT_CHARSET)
    meta = request.META.copy()
    meta["CONTENT_TYPE"] = media_type
    upload_handlers = request.upload_handlers

    try:
        parser = DjangoMultiPartParser(meta, stream, upload_handlers, encoding)
        data, files = parser.parse()
        return DataAndFiles(
            underscoreize(data, **api_settings.JSON_UNDERSCOREIZE),
            underscoreize(files, **api_settings.JSON_UNDERSCOREIZE),
        )
    except MultiPartParserError as exc:
        raise ParseError("Multipart form parse error - %s" % str(exc))
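# --- Usage sketch (added for illustration) ---------------------------------
# A minimal sketch of how any of the parse() variants in this section gets
# hooked into a view: wrap it in a parser class and list it in
# `parser_classes`. Shown here with DRF's stock MultiPartParser; the view
# name and response shape are placeholders, not taken from the snippets.
from rest_framework.parsers import MultiPartParser
from rest_framework.response import Response
from rest_framework.views import APIView


class UploadExampleView(APIView):
    parser_classes = [MultiPartParser]  # swap in one of the custom parsers here

    def post(self, request):
        # `request.data` and `request.FILES` expose the two halves of the
        # DataAndFiles object the parser returned.
        return Response({'fields': dict(request.data),
                         'files': list(request.FILES)})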
def parse(self, stream, media_type=None, parser_context=None):
    """
    Parses the incoming bytestream as a multipart encoded form,
    and returns a DataAndFiles object.

    `.data` will be a `QueryDict` containing all the form parameters,
    and JSON decoded where available.
    `.files` will be a `QueryDict` containing all the form files.
    """
    parser_context = parser_context or {}
    request = parser_context['request']
    encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET)
    meta = request.META.copy()
    meta['CONTENT_TYPE'] = media_type
    upload_handlers = request.upload_handlers

    try:
        parser = DjangoMultiPartParser(meta, stream, upload_handlers, encoding)
        data, files = parser.parse()
        for key in data:
            if data[key]:
                try:
                    data[key] = json.loads(data[key])
                except ValueError:
                    pass
        return DataAndFiles(data, files)
    except MultiPartParserError as exc:
        raise ParseError('Multipart form parse error - %s' % six.text_type(exc))
def parse(self, stream, media_type=None, parser_context=None):
    parser_context = parser_context or {}
    request = parser_context['request']
    encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET)
    meta = request.META.copy()
    meta['CONTENT_TYPE'] = media_type
    upload_handlers = request.upload_handlers

    try:
        parser = MultiPartParser(meta, stream, upload_handlers, encoding)
        data, files = parser.parse()
        # Copy to get a mutable QueryDict before decoding JSON values.
        data = data.copy()
        for key in data:
            if data[key]:
                try:
                    value = json.loads(data[key])
                    if isinstance(value, list):
                        data.setlist(key, value)
                    else:
                        data[key] = value
                except ValueError:
                    pass
        return DataAndFiles(data, files)
    except MultiPartParserError as exc:
        print(traceback.format_exc())
        raise ParseError('Multipart form parse error - %s' % six.text_type(exc))
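# --- Why setlist() matters (added for illustration) -------------------------
# The variant above uses QueryDict.setlist() for decoded JSON lists. A quick
# standalone sketch of the difference: plain assignment to a QueryDict wraps
# the list as a single value, while setlist() keeps each element separate.
# settings.configure() is only needed when running this outside a project.
import json

from django.conf import settings
from django.http import QueryDict

if not settings.configured:
    settings.configure()

data = QueryDict('', mutable=True)
data.setlist('tags', json.loads('["a", "b", "c"]'))
assert data.getlist('tags') == ['a', 'b', 'c']
assert data['tags'] == 'c'  # __getitem__ returns the last element

data['tags'] = json.loads('["a", "b", "c"]')      # plain assignment instead
assert data.getlist('tags') == [['a', 'b', 'c']]  # one value: the whole list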
def parse(self, stream, media_type=None, parser_context=None):
    # Each non-empty line is expected to look like: "<datetime>","<uuid>","<value>"
    content = [line.strip().split(',')
               for line in stream.read().split('\n') if line.strip()]
    data = [{'uuid': row[1].strip('"'),
             'events': [{'datetime': row[0].strip('"'),
                         'value': row[2].strip('"')}]}
            for row in content]
    return DataAndFiles(data, None)
def parse(self, stream, media_type=None, parser_context=None):
    result = super().parse(stream, media_type, parser_context)
    data_imtb = result.data.copy()
    file_imtb = result.files.copy()

    resolution = data_imtb.get('resolution')
    if resolution:
        data_imtb['resolution'] = json.loads(resolution)

    if not result.files:
        file_imtb['original'] = get_pic_from_url(data_imtb.get('original'))

    return DataAndFiles(data_imtb, file_imtb)
def parse(self, stream, media_type=None, parser_context=None):
    """
    Treats the incoming bytestream as a raw file upload and returns
    a `DataAndFiles` object.

    `.data` will be the parsed content of the torrent's metainfo dictionary.
    `.files` will be an empty dict.
    """
    request = parser_context['request']
    request.upload_handlers = [TorrentFileUploadHandler()]

    data_and_files = super().parse(stream, media_type, parser_context)
    torrent_file = data_and_files.files['file']
    metainfo_dict = self.parse_torrent_file(torrent_file)
    data = self.parse_metainfo_dict(metainfo_dict)
    return DataAndFiles(data, {})
def parse(self, stream, media_type=None, parser_context=None):
    """
    Parses the incoming bytestream as a multipart encoded form,
    and returns a DataAndFiles object.

    `.data` will be a `QueryDict` containing all the form parameters.
    `.files` will be a `QueryDict` containing all the form files.

    For POSTs, accept Django request parsing. See issue #3951.
    """
    parser_context = parser_context or {}
    request = parser_context['request']
    _request = request._request

    if _request.method == 'POST':
        return DataAndFiles(_request.POST, _request.FILES)

    return super().parse(stream, media_type=media_type, parser_context=parser_context)
def parse(self, stream, media_type=None, parser_context=None):
    '''
    Parses the incoming bytestream as a multipart encoded form,
    and returns a DataAndFiles object.

    `.data` will be a `QueryDict` containing all the form parameters.
    `.files` will be a `QueryDict` containing all the form files.
    '''
    parser_context = parser_context or {}
    request = parser_context['request']
    encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET)
    meta = request.META.copy()
    meta['CONTENT_TYPE'] = media_type
    upload_handlers = request.upload_handlers

    try:
        parser = DjangoMultiPartParser(meta, stream, upload_handlers, encoding)
        data, files = parser.parse()
        return DataAndFiles(decamelize(data), decamelize(files))
    except MultiPartParserError as err:
        raise ParseError(f'Multipart form parse error - {err}')
def parse(self, stream, media_type=None, parser_context=None):
    result = super().parse(stream, media_type=media_type, parser_context=parser_context)

    data = {}
    for key, value in result.data.items():
        if not isinstance(value, str):
            data[key] = value
            continue
        if '{' in value or '[' in value:
            try:
                data[key] = json.loads(value)
            except ValueError:
                data[key] = value
        else:
            data[key] = value

    qdict = QueryDict('', mutable=True)
    qdict.update(data)
    return DataAndFiles(qdict, result.files)
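# --- Sniffing heuristic (added for illustration) ----------------------------
# The variant above only attempts json.loads() on values containing '{' or
# '[', so scalar-looking strings such as '5' or 'true' stay as plain strings
# (unlike the variants that decode every non-empty value). The sample inputs
# below are made up for the sketch.
import json

samples = {'obj': '{"a": 1}', 'arr': '[1, 2]', 'text': 'plain', 'num': '5'}
decoded = {}
for key, value in samples.items():
    if '{' in value or '[' in value:
        try:
            decoded[key] = json.loads(value)
        except ValueError:
            decoded[key] = value
    else:
        decoded[key] = value

assert decoded == {'obj': {'a': 1}, 'arr': [1, 2], 'text': 'plain', 'num': '5'}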
def parse(self, stream, media_type=None, parser_context=None):
    """Parse the incoming bytestream.

    Parses the incoming bytestream as a multipart encoded form and returns
    a DataAndFiles object.

    `.data` will be a `QueryDict` containing all the form parameters,
    and JSON decoded where available.
    `.files` will be a `QueryDict` containing all the form files.

    :param (bytes) stream: Incoming byte stream.
    :param (str) media_type: Media Type.
    :param (dict) parser_context: Context.
    """
    parser_context = parser_context or {}
    request = parser_context["request"]
    encoding = parser_context.get("encoding", settings.DEFAULT_CHARSET)
    meta = request.META.copy()
    meta["CONTENT_TYPE"] = media_type
    upload_handlers = request.upload_handlers

    try:
        parser = DjangoMultiPartParser(meta, stream, upload_handlers, encoding)
        data, files = parser.parse()
        # get a dict of data to make it mutable
        _data = data.dict()
        for key in _data:
            if _data[key]:
                try:
                    _data[key] = json.loads(_data[key])
                except ValueError:
                    pass
        return DataAndFiles(_data, files)
    except MultiPartParserError as exc:
        raise ParseError("Multipart form parse error - %s" % str(exc))
def parse(self, stream, media_type=None, parser_context=None):
    return DataAndFiles({'chunk': stream.body}, {})
def parse(self, stream, media_type=None, parser_context=None):
    content = stream.read()
    return DataAndFiles({}, content)
def parse(self, stream, media_type=None, parser_context=None):
    '''
    Treats the incoming bytestream as a raw file upload and returns
    a `DataAndFiles` object.

    `.data` will be an empty dict (the request body is expected to be the
    file content).
    `.files` will be a `QueryDict` containing one 'file' element.
    '''
    parser_context = parser_context or {}
    request = parser_context['request']
    encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET)
    meta = request.META
    upload_handlers = request.upload_handlers
    filename = "%s.sif" % str(uuid.uuid4())
    print("Filename for parser %s" % filename)

    # Note that this code is extracted from Django's handling of
    # file uploads in MultiPartParser.
    content_type = meta.get('HTTP_CONTENT_TYPE', meta.get('CONTENT_TYPE', ''))
    try:
        content_length = int(
            meta.get('HTTP_CONTENT_LENGTH', meta.get('CONTENT_LENGTH', 0)))
    except (ValueError, TypeError):
        content_length = None

    # See if the handler will want to take care of the parsing.
    for handler in upload_handlers:
        result = handler.handle_raw_input(stream, meta, content_length, None, encoding)
        if result is not None:
            return DataAndFiles({}, {'file': result[1]})

    # This is the standard case.
    possible_sizes = [x.chunk_size for x in upload_handlers if x.chunk_size]
    chunk_size = min([2 ** 31 - 4] + possible_sizes)
    chunks = ChunkIter(stream, chunk_size)
    counters = [0] * len(upload_handlers)

    for index, handler in enumerate(upload_handlers):
        try:
            handler.new_file(None, filename, content_type, content_length, encoding)
        except StopFutureHandlers:
            upload_handlers = upload_handlers[:index + 1]
            break

    for chunk in chunks:
        for index, handler in enumerate(upload_handlers):
            chunk_length = len(chunk)
            chunk = handler.receive_data_chunk(chunk, counters[index])
            counters[index] += chunk_length
            if chunk is None:
                break

    for index, handler in enumerate(upload_handlers):
        file_obj = handler.file_complete(counters[index])
        if file_obj is not None:
            return DataAndFiles({}, {'file': file_obj})

    raise ParseError(self.errors['unhandled'])
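# --- Client-side sketch (added for illustration) ----------------------------
# The raw-upload parser above consumes the whole request body as a single
# file, so a client sends the bytes directly rather than a multipart form.
# The endpoint URL and filename are placeholders; `requests` is used only to
# illustrate the shape of the request.
import requests

with open('network.sif', 'rb') as fh:
    response = requests.post(
        'https://example.com/api/networks/',   # placeholder endpoint
        data=fh,                               # raw body, no multipart encoding
        headers={'Content-Type': 'application/octet-stream'},
    )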
def parse(self, stream, media_type, parser_context):
    data_and_files = super().parse(stream, media_type, parser_context)
    files = data_and_files.files.dict()
    data = self._load_json(data_and_files.data)
    return DataAndFiles(data, files)