def stream_csv(datafile):
    """Stream the result rows of *datafile* back as a CSV attachment."""
    buf = StringIO.StringIO()
    writer = csv.writer(buf)
    qs = getQuerySet(datafile)
    sample = qs[0]
    # Build (csv header, model attribute) pairs, skipping the NMTK
    # bookkeeping columns that should not appear in the export.
    hidden = ('nmtk_id', 'nmtk_geometry', 'nmtk_feature_id')
    db_map = [(f.db_column or f.name, f.name)
              for f in sample._meta.fields
              if f.name and f.name not in hidden]

    def drain_buffer():
        # Hand back everything buffered so far and reset for the next row.
        buf.seek(0)
        chunk = buf.read()
        buf.seek(0)
        buf.truncate()
        return chunk

    def generate():
        # Header row first, then one CSV row per model instance; the
        # buffer is drained after each row so the response streams.
        writer.writerow([header for header, _attr in db_map])
        for record in qs:
            writer.writerow([getattr(record, attr) for _hdr, attr in db_map])
            yield drain_buffer()

    response = HttpResponse(generate(), content_type="text/csv")
    response.streaming = True
    response['Content-Disposition'] = "attachment; filename=results.csv"
    return response
def make_response(generator, format, name, encoding=None):
    """
    @param data An iterator of rows, where every row is a list of strings
    @param format Either "csv" or "xls"
    @param name Base name for output file
    @param encoding Unicode encoding for data
    """
    # Dispatch table instead of an if/elif chain; unknown formats are
    # rejected before any response object is built.
    known_mimetypes = {
        'csv': 'application/csv',
        'xls': 'application/xls',
    }
    if format not in known_mimetypes:
        raise Exception("Unknown format: %s" % format)
    mimetype = known_mimetypes[format]

    # FIXME: this should be replaced with StreamingHttpResponse when we upgrade
    # to Django 1.5+.
    resp = HttpResponse(generator, mimetype=mimetype)
    resp['Content-Disposition'] = 'attachment; filename=%s.%s' % (name, format)
    resp.streaming = True
    # A streamed body has no predictable length, so drop the header if the
    # framework added one.
    try:
        del resp['Content-Length']
    except KeyError:
        pass
    return resp
def stream_xls(datafile):
    """Render the query results for *datafile* as an Excel (.xls) download.

    Returns an HttpResponse carrying the workbook, or None if anything
    fails (the error is logged via ``logger.exception``).
    """
    try:
        qs = getQuerySet(datafile)
        row = qs[0]
        # (column header, model attribute) pairs, skipping the NMTK
        # bookkeeping fields.
        db_map = [(field.db_column or field.name, field.name)
                  for field in row._meta.fields
                  if field.name not in ('nmtk_id', 'nmtk_geometry',
                                        'nmtk_feature_id')]
        # style0: bold coloured Times for the header row; style1: plain
        # Times for data rows.
        font0 = xlwt.Font()
        font0.name = 'Times New Roman'
        font0.colour_index = 2
        font0.bold = True
        font1 = xlwt.Font()
        font1.name = 'Times New Roman'
        style0 = xlwt.XFStyle()
        style0.font = font0
        style1 = xlwt.XFStyle()
        style1.font = font1
        wb = xlwt.Workbook()
        ws = wb.add_sheet('NMTK Results')
        rowid = 0
        # Header row.
        for i, v in enumerate(db_map):
            ws.write(rowid, i, v[0], style0)
        # One worksheet row per model instance.
        for row in qs:
            rowid += 1
            for i, v in enumerate(db_map):
                ws.write(rowid, i, getattr(row, v[1]), style1)
        logger.debug('Getting ready to return XLS response...')
        response = HttpResponse(mimetype="application/ms-excel")
        response.streaming = True
        response['Content-Disposition'] = "attachment; filename=results.xls"
        wb.save(response)
        return response
    except Exception:
        # BUG FIX: this was a bare ``except:`` which also trapped
        # SystemExit/KeyboardInterrupt. Behavior is otherwise unchanged:
        # the error is logged and None is returned.
        logger.exception('Something went wrong!')
def stream_csv(datafile):
    """Stream the result rows of *datafile* as a CSV attachment.

    A single in-memory buffer is shared between the csv writer and the
    generator below: each written row is drained out of the buffer and
    yielded, so the whole result set is never held in memory at once.
    """
    csvfile = StringIO.StringIO()
    csvwriter = csv.writer(csvfile)
    qs = getQuerySet(datafile)
    # Sample row; its _meta describes the model fields.  NOTE(review):
    # raises IndexError if the queryset is empty — presumably callers
    # guarantee at least one row; confirm upstream.
    row = qs[0]
    # (csv header, model attribute) pairs, excluding NMTK bookkeeping
    # fields that should not be exported.
    db_map = [(field.db_column or field.name, field.name)
              for field in row._meta.fields
              if field.name and field.name not in ('nmtk_id',
                                                   'nmtk_geometry',
                                                   'nmtk_feature_id')]

    def read_and_flush():
        # Return everything buffered so far and reset the buffer.
        csvfile.seek(0)
        data = csvfile.read()
        csvfile.seek(0)
        csvfile.truncate()
        return data

    def data():
        # Output the CSV headings
        csvwriter.writerow([i[0] for i in db_map])
        for row in qs:
            csvwriter.writerow([getattr(row, mf) for dbf, mf in db_map])
            data = read_and_flush()
            yield data

    response = HttpResponse(data(), mimetype="text/csv")
    response.streaming = True
    response['Content-Disposition'] = "attachment; filename=results.csv"
    return response
def stream_xls(datafile):
    """Render *datafile*'s results as an Excel (.xls) download.

    Column headers come from ``datafile.fields``; the corresponding model
    attribute names come from ``datafile.field_attributes``.  Returns an
    HttpResponse, or None on error (logged).
    """
    try:
        qs = getQuerySet(datafile)
        row = qs[0]
        # Contains the field names used in the model(s)
        db_map = [datafile.field_attributes[field]['field_name']
                  for field in datafile.fields]
        # style0: bold coloured Times for headers; style1: plain Times.
        font0 = xlwt.Font()
        font0.name = 'Times New Roman'
        font0.colour_index = 2
        font0.bold = True
        font1 = xlwt.Font()
        font1.name = 'Times New Roman'
        style0 = xlwt.XFStyle()
        style0.font = font0
        style1 = xlwt.XFStyle()
        style1.font = font1
        wb = xlwt.Workbook()
        ws = wb.add_sheet('NMTK Results')
        rowid = 0
        # Generate the header fields.
        for i, v in enumerate(datafile.fields):
            ws.write(rowid, i, v, style0)
        for row in qs:
            rowid += 1
            for i, v in enumerate(db_map):
                ws.write(rowid, i, getattr(row, v), style1)
        response = HttpResponse(content_type="application/ms-excel")
        response.streaming = True
        response['Content-Disposition'] = "attachment; filename=results.xls"
        wb.save(response)
        return response
    except Exception:
        # BUG FIX: was a bare ``except:``, which also swallowed
        # SystemExit/KeyboardInterrupt.  Same observable behavior for
        # ordinary errors: log and fall through to an implicit None.
        logger.exception('Something went wrong!')
def post(self, request, *args, **kwargs):
    """Authenticate a member and return the requested package version
    as a zip attachment.

    Expects ``email``, ``api_key`` and ``version`` in the POST body; all
    three are mandatory.  Responds 400 on missing fields, 403 on bad or
    inactive credentials, 404 when no matching package exists.
    """
    email = request.POST.get('email')
    api_key = request.POST.get('api_key')
    version = request.POST.get('version')
    # BUG FIX: the original test was
    #   ``not email or not api_key and not version``
    # and because ``and`` binds tighter than ``or`` it skipped validation
    # when only the API key (or only the version) was missing.
    if not (email and api_key and version):
        message = {'success': False, 'errors': []}
        if not email:
            message['errors'].append('Email is mandatory.')
        if not api_key:
            message['errors'].append('API key is mandatory.')
        if not version:
            message['errors'].append('Version is mandatory.')
        return HttpResponseBadRequest(json.dumps(message))
    member = self.get_member(email, api_key)
    if member is None:
        return HttpResponseForbidden(
            json.dumps({'success': False, 'errors': ['Bad credentials.']}))
    if not member.is_active:
        return HttpResponseForbidden(
            json.dumps({'success': False, 'errors': ['Account not active.']}))
    try:
        package = member.package_set.get(version=version)
    except Package.DoesNotExist:
        return HttpResponseNotFound(
            json.dumps({'success': False, 'errors': ['Package not found.']}))
    response = HttpResponse(
        package.package.read(),
        mimetype='application/zip, application/octet-stream')
    response.streaming = True
    response['Content-Disposition'] = 'attachment; filename=package_version-%s.zip' \
        % package.version
    return response
def stream_csv(datafile, include_wkt=False):
    """Stream *datafile*'s results as a CSV attachment.

    :param include_wkt: accepted for interface compatibility but currently
        unused — geometry (WKT) output is not implemented here.
    """
    # Removed a stale commented-out version of the db_map computation that
    # derived columns from model _meta; columns now come from the datafile.
    csvfile = StringIO.StringIO()
    csvwriter = csv.writer(csvfile)
    qs = getQuerySet(datafile)
    # Force evaluation / fail fast on an empty result set (value unused).
    row = qs[0]
    # Model attribute names, one per exported column in datafile.fields.
    db_map = [
        datafile.field_attributes[field]['field_name']
        for field in datafile.fields
    ]

    def read_and_flush():
        # Return everything buffered so far and reset the buffer.
        csvfile.seek(0)
        data = csvfile.read()
        csvfile.seek(0)
        csvfile.truncate()
        return data

    def data():
        # Output the CSV headings
        csvwriter.writerow(datafile.fields)
        for row in qs:
            csvwriter.writerow([getattr(row, dbf) for dbf in db_map])
            data = read_and_flush()
            yield data

    response = HttpResponse(data(), content_type="text/csv")
    response.streaming = True
    response['Content-Disposition'] = "attachment; filename=results.csv"
    return response
def export(self, events, metadata_names=None, feed_name="events"):
    '''
    Creates an HTTP response of ical data representing the contents of the
    events sequence

    :param events: a sequence of events
    :param metadata_names: mapping between the metadata key and the ical
        property name. The key is the metadata key, the value is the name
        of the ical property.
        If the metadata value is a list, it will be output as multiple
        properties in the ical stream.
    :param feed_name: base name for the downloaded ``.ics`` attachment.
    '''
    def generate_utf8():
        '''
        Generator function to produce ical data.
        All output should be UTF-8; all internal processing should be
        using Python Unicode objects.
        '''
        # Calendar preamble: PRODID/VERSION come from instance state.
        yield "".join([
            "BEGIN:VCALENDAR\r\n"
            "PRODID:", self.prodid, "\r\n"
            "VERSION:", self.version, "\r\n"
        ]).encode("utf-8")
        for e in events:
            try:
                event = self.make_event(e, metadata_names=metadata_names)
                yield event.to_ical()  # always produces UTF-8
            except:
                # NOTE: bare excepts below are deliberate defensive coding;
                # see the comments for why narrowing them is unsafe here.
                if self.stream_response:
                    # We have to be careful here because HttpResponse seems to eat
                    # all exceptions produced by generator functions and then
                    # silently truncates the response without logging anything :-(
                    try:
                        LOG.exception("Error generating iCal feed")
                    except:
                        # Fall through to the final "ABORTED" yield below
                        pass
                    # In all cases, make sure we at least put something in the
                    # response body itself to indicate we deliberately truncated
                    # the file.
                    # The only "safe" thing to do is to 'yield' a string literal;
                    # anything else may cause another exception and produce silent
                    # truncation again.
                    yield b"===== ABORTED iCal generation due to error ====="
                    return  # Don't bother carrying on, no more generations
                else:
                    # If we're not streaming, then raise the exception immediately
                    # during the join before HttpResponse has a chance to eat it!
                    raise
        yield "END:VCALENDAR\r\n".encode("utf-8")

    ical_body = generate_utf8()
    if not self.stream_response:
        # Buffer the whole feed so generator errors surface here, not
        # inside HttpResponse's iteration.
        ical_body = b"".join(ical_body)
    response = HttpResponse(ical_body,
                            content_type="text/calendar; charset=utf-8")
    response[
        'Content-Disposition'] = "attachment; filename=%s.ics" % feed_name
    response.streaming = self.stream_response
    return response
def post(self, request, *args, **kwargs):
    """Authenticate a member and return a package as a zip attachment.

    The package is selected by ``package_id`` when supplied, otherwise by
    the latest upload matching ``version``.  Responds 400 on missing
    fields, 403 on bad/inactive credentials or disallowed package, 404
    when the package does not exist.
    """
    email = request.POST.get("email")
    api_key = request.POST.get("api_key")
    version = request.POST.get("version")
    package_id = request.POST.get("package_id")
    # BUG FIX: was ``not email or not api_key and not version`` — ``and``
    # binds tighter than ``or``, so a missing api_key alone (or missing
    # version alone) bypassed validation.
    if not (email and api_key and version):
        message = {"success": False, "errors": []}
        if not email:
            message["errors"].append("Email is mandatory.")
        if not api_key:
            message["errors"].append("API key is mandatory.")
        if not version:
            message["errors"].append("Version is mandatory.")
        return HttpResponseBadRequest(json.dumps(message))
    member = self.get_member(email, api_key)
    if member is None:
        return HttpResponseForbidden(
            json.dumps({
                "success": False,
                "errors": ["Bad credentials."]
            }))
    if not member.is_active:
        return HttpResponseForbidden(
            json.dumps({
                "success": False,
                "errors": ["Account not active."]
            }))
    # BUG FIX: ``member.package_set.get(pk=package_id)`` raised an
    # uncaught DoesNotExist when the id was unknown (and its truthiness
    # test was dead code, since .get() never returns a falsy value).
    if package_id and not member.package_set.filter(pk=package_id).exists():
        return HttpResponseForbidden(
            json.dumps({
                "success": False,
                "errors": ["Access to this package is not allowed."],
            }))
    try:
        if package_id:
            package = member.package_set.only("package").get(pk=package_id)
        else:
            package = (member.package_set.filter(
                version=version).only("package").latest("update"))
    except Package.DoesNotExist:
        return HttpResponseNotFound(
            json.dumps({
                "success": False,
                "errors": ["Package not found."]
            }))
    response = HttpResponse(
        package.package.read(),
        content_type="application/zip, application/octet-stream",
    )
    response.streaming = True
    response["Content-Disposition"] = (
        "attachment; filename=package_version-%s.zip" % package.version)
    return response
def post(self, request, *args, **kwargs):
    """Authenticate a member and return a package as a zip attachment.

    Selection: by ``package_id`` when supplied, otherwise the latest
    upload matching ``version``.  400 on missing fields, 403 on bad or
    inactive credentials or disallowed package, 404 when not found.
    """
    email = request.POST.get("email")
    api_key = request.POST.get("api_key")
    version = request.POST.get("version")
    package_id = request.POST.get("package_id")
    # BUG FIX: was ``not email or not api_key and not version``; operator
    # precedence made a missing api_key (or version) alone slip through.
    if not (email and api_key and version):
        message = {"success": False, "errors": []}
        if not email:
            message["errors"].append("Email is mandatory.")
        if not api_key:
            message["errors"].append("API key is mandatory.")
        if not version:
            message["errors"].append("Version is mandatory.")
        return HttpResponseBadRequest(json.dumps(message))
    member = self.get_member(email, api_key)
    if member is None:
        return HttpResponseForbidden(
            json.dumps({"success": False, "errors": ["Bad credentials."]})
        )
    if not member.is_active:
        return HttpResponseForbidden(
            json.dumps({"success": False, "errors": ["Account not active."]})
        )
    # BUG FIX: .get(pk=...) raised an uncaught DoesNotExist for unknown
    # ids; use an existence query for the access check instead.
    if package_id and not member.package_set.filter(pk=package_id).exists():
        return HttpResponseForbidden(
            json.dumps(
                {
                    "success": False,
                    "errors": ["Access to this package is not allowed."],
                }
            )
        )
    try:
        if package_id:
            package = member.package_set.only("package").get(pk=package_id)
        else:
            package = (
                member.package_set.filter(version=version)
                .only("package")
                .latest("update")
            )
    except Package.DoesNotExist:
        return HttpResponseNotFound(
            json.dumps({"success": False, "errors": ["Package not found."]})
        )
    response = HttpResponse(
        package.package.read(),
        content_type="application/zip, application/octet-stream",
    )
    response.streaming = True
    response["Content-Disposition"] = (
        "attachment; filename=package_version-%s.zip" % package.version
    )
    return response
def post(self, request, *args, **kwargs):
    """Authenticate a member and return a package as a zip attachment.

    Selection: by ``package_id`` when supplied, otherwise the latest
    upload matching ``version``.  400 on missing fields, 403 on bad or
    inactive credentials or disallowed package, 404 when not found.
    """
    email = request.POST.get('email')
    api_key = request.POST.get('api_key')
    version = request.POST.get('version')
    package_id = request.POST.get('package_id')
    # BUG FIX: was ``not email or not api_key and not version``; because
    # ``and`` binds tighter than ``or``, a missing api_key (or version)
    # alone bypassed the mandatory-field check.
    if not (email and api_key and version):
        message = {'success': False, 'errors': []}
        if not email:
            message['errors'].append('Email is mandatory.')
        if not api_key:
            message['errors'].append('API key is mandatory.')
        if not version:
            message['errors'].append('Version is mandatory.')
        return HttpResponseBadRequest(json.dumps(message))
    member = self.get_member(email, api_key)
    if member is None:
        return HttpResponseForbidden(
            json.dumps({
                'success': False,
                'errors': ['Bad credentials.']
            }))
    if not member.is_active:
        return HttpResponseForbidden(
            json.dumps({
                'success': False,
                'errors': ['Account not active.']
            }))
    # BUG FIX: .get(pk=...) raised an uncaught DoesNotExist for unknown
    # ids; an existence query is the correct access check.
    if package_id and not member.package_set.filter(pk=package_id).exists():
        return HttpResponseForbidden(
            json.dumps({
                'success': False,
                'errors': ['Access to this package is not allowed.']
            }))
    try:
        if package_id:
            package = member.package_set.only('package').get(pk=package_id)
        else:
            package = member.package_set.filter(
                version=version).only('package').latest('update')
    except Package.DoesNotExist:
        return HttpResponseNotFound(
            json.dumps({
                'success': False,
                'errors': ['Package not found.']
            }))
    response = HttpResponse(
        package.package.read(),
        content_type='application/zip, application/octet-stream')
    response.streaming = True
    response['Content-Disposition'] = (
        'attachment; filename=package_version-%s.zip' % package.version)
    return response
def song_stream(request, *args, **kwargs):
    """Serve the MP3 file for the song identified by the ``pk`` kwarg.

    Responds 400 when no pk is supplied and 404 when the song does not
    exist; otherwise returns the file bytes as ``audio/mpeg``.
    """
    song_pk = kwargs.get("pk")
    # Guard clauses replace the original nested if/else; same responses.
    if not song_pk:
        return HttpResponse(status=status.HTTP_400_BAD_REQUEST)
    try:
        song = Song.objects.get(pk=song_pk)
    except Song.DoesNotExist:
        return HttpResponse(status=status.HTTP_404_NOT_FOUND)
    file_size = os.path.getsize(song.path)
    # BUG FIX: the file handle was opened and never closed (resource
    # leak); a context manager guarantees it is released.
    with open(song.path, "rb") as fh:
        mp3 = fh.read()
    response = HttpResponse(content=mp3, content_type="audio/mpeg")
    response["Content-Length"] = file_size
    response.streaming = True
    return response
def stream_xls(datafile):
    """Render *datafile*'s results as an Excel (.xls) download.

    Headers come from ``datafile.fields``; model attribute names from
    ``datafile.field_attributes``.  Returns an HttpResponse, or None on
    error (logged).
    """
    try:
        qs = getQuerySet(datafile)
        row = qs[0]
        # Contains the field names used in the model(s)
        db_map = [
            datafile.field_attributes[field]['field_name']
            for field in datafile.fields
        ]
        # style0: bold coloured Times for headers; style1: plain Times.
        font0 = xlwt.Font()
        font0.name = 'Times New Roman'
        font0.colour_index = 2
        font0.bold = True
        font1 = xlwt.Font()
        font1.name = 'Times New Roman'
        style0 = xlwt.XFStyle()
        style0.font = font0
        style1 = xlwt.XFStyle()
        style1.font = font1
        wb = xlwt.Workbook()
        ws = wb.add_sheet('NMTK Results')
        rowid = 0
        # Generate the header fields.
        for i, v in enumerate(datafile.fields):
            ws.write(rowid, i, v, style0)
        for row in qs:
            rowid += 1
            for i, v in enumerate(db_map):
                ws.write(rowid, i, getattr(row, v), style1)
        response = HttpResponse(content_type="application/ms-excel")
        response.streaming = True
        response['Content-Disposition'] = "attachment; filename=results.xls"
        wb.save(response)
        return response
    except Exception:
        # BUG FIX: was a bare ``except:``, which also swallowed
        # SystemExit/KeyboardInterrupt.  Ordinary errors are still logged
        # and the function falls through to an implicit None.
        logger.exception('Something went wrong!')
def __call__(self, request, *args, **kwargs):
    """
    NB: Sends a `Vary` header so we don't cache requests
    that are different (OAuth stuff in `Authorization` header.)

    Dispatches the request to the authenticated handler method and
    serializes the result with the negotiated emitter.
    """
    rm = request.method.upper()

    # Django's internal mechanism doesn't pick up
    # PUT request, so we trick it a little here.
    if rm == "PUT":
        coerce_put_post(request)

    actor, anonymous = self.authenticate(request, rm)

    if anonymous is CHALLENGE:
        # Authentication demanded a challenge (e.g. 401); actor builds it.
        return actor()
    else:
        handler = actor

    # Translate nested datastructs into `request.data` here.
    if rm in ('POST', 'PUT'):
        try:
            translate_mime(request)
        except MimerDataException:
            return rc.BAD_REQUEST
        if not hasattr(request, 'data'):
            if rm == 'POST':
                request.data = request.POST
            else:
                request.data = request.PUT

    if not rm in handler.allowed_methods:
        return HttpResponseNotAllowed(handler.allowed_methods)

    meth = getattr(handler, self.callmap.get(rm, ''), None)
    if not meth:
        raise Http404

    # Support emitter through (?P<emitter_format>) and ?format=emitter
    # and lastly Accept: header processing
    em_format = self.determine_emitter(request, *args, **kwargs)
    if not em_format:
        request_has_accept = 'HTTP_ACCEPT' in request.META
        if request_has_accept and self.strict_accept:
            return rc.NOT_ACCEPTABLE
        em_format = self.default_emitter

    kwargs.pop('emitter_format', None)

    # Clean up the request object a bit, since we might
    # very well have `oauth_`-headers in there, and we
    # don't want to pass these along to the handler.
    request = self.cleanup_request(request)

    try:
        result = meth(request, *args, **kwargs)
    except Exception as e:
        result = self.error_handler(e, request, meth, em_format)

    try:
        emitter, ct = Emitter.get(em_format)
        fields = handler.fields
        # List-like results may declare their own field set; a callable
        # field spec is resolved with the request context.
        if hasattr(handler, 'list_fields') and isinstance(result, (list, tuple, QuerySet, RawQuerySet)):
            fields = handler.list_fields
        if callable(fields):
            fields = fields(request, *args, **kwargs)
    except ValueError:
        result = rc.BAD_REQUEST
        result.content = "Invalid output format specified '%s'." % em_format
        return result

    status_code = 200

    # If we're looking at a response object which contains non-string
    # content, then assume we should use the emitter to format that
    # content
    if self._use_emitter(result):
        status_code = result.status_code
        # Note: We can't use result.content here because that
        # method attempts to convert the content into a string
        # which we don't want.  when
        # _is_string/_base_content_is_iter is False _container is
        # the raw data
        result = result._container

    srl = emitter(result, typemapper, handler, fields, anonymous)

    try:
        """
        Decide whether or not we want a generator here,
        or we just want to buffer up the entire result
        before sending it to the client. Won't matter for
        smaller datasets, but larger will have an impact.
        """
        if self.stream:
            stream = srl.stream_render(request)
        else:
            stream = srl.render(request)

        if not isinstance(stream, HttpResponse):
            resp = HttpResponse(stream, content_type=ct, status=status_code)
        else:
            resp = stream

        resp.streaming = self.stream

        return resp
    except HttpStatusCode as e:
        return e.response
def __call__(self, request, *args, **kwargs):
    """
    NB: Sends a `Vary` header so we don't cache requests
    that are different (OAuth stuff in `Authorization` header.)

    Dispatches the request to the authenticated handler method and
    serializes the result with the negotiated emitter.
    """
    rm = request.method.upper()

    # Django's internal mechanism doesn't pick up
    # PUT request, so we trick it a little here.
    if rm == "PUT":
        coerce_put_post(request)

    actor, anonymous = self.authenticate(request, rm)

    if anonymous is CHALLENGE:
        # Authentication demanded a challenge (e.g. 401); actor builds it.
        return actor()
    else:
        handler = actor

    # Translate nested datastructs into `request.data` here.
    if rm in ('POST', 'PUT'):
        try:
            translate_mime(request)
        except MimerDataException:
            return rc.BAD_REQUEST
        if not hasattr(request, 'data'):
            if rm == 'POST':
                request.data = request.POST
            else:
                request.data = request.PUT

    if not rm in handler.allowed_methods:
        return HttpResponseNotAllowed(handler.allowed_methods)

    meth = getattr(handler, self.callmap.get(rm, ''), None)
    if not meth:
        raise Http404

    # Support emitter through (?P<emitter_format>) and ?format=emitter
    # and lastly Accept: header processing
    em_format = self.determine_emitter(request, *args, **kwargs)
    if not em_format:
        request_has_accept = 'HTTP_ACCEPT' in request.META
        if request_has_accept and self.strict_accept:
            return rc.NOT_ACCEPTABLE
        em_format = self.default_emitter

    kwargs.pop('emitter_format', None)

    # Clean up the request object a bit, since we might
    # very well have `oauth_`-headers in there, and we
    # don't want to pass these along to the handler.
    request = self.cleanup_request(request)

    try:
        result = meth(request, *args, **kwargs)
    except Exception as e:
        result = self.error_handler(e, request, meth, em_format)

    try:
        emitter, ct = Emitter.get(em_format)
        fields = handler.fields
        # List-like results may declare their own field set.
        if hasattr(handler, 'list_fields') and isinstance(
                result, (list, tuple, QuerySet)):
            fields = handler.list_fields
    except ValueError:
        result = rc.BAD_REQUEST
        result.content = "Invalid output format specified '%s'." % em_format
        return result

    status_code = 200

    # If we're looking at a response object which contains non-string
    # content, then assume we should use the emitter to format that
    # content
    if self._use_emitter(result):
        status_code = result.status_code
        # Note: We can't use result.content here because that
        # method attempts to convert the content into a string
        # which we don't want.  when
        # _is_string/_base_content_is_iter is False _container is
        # the raw data
        result = result._container

    srl = emitter(result, typemapper, handler, fields, anonymous)

    try:
        """
        Decide whether or not we want a generator here,
        or we just want to buffer up the entire result
        before sending it to the client. Won't matter for
        smaller datasets, but larger will have an impact.
        """
        if self.stream:
            stream = srl.stream_render(request)
        else:
            stream = srl.render(request)

        if not isinstance(stream, HttpResponse):
            resp = HttpResponse(stream, content_type=ct, status=status_code)
        else:
            resp = stream

        resp.streaming = self.stream

        return resp
    except HttpStatusCode as e:
        return e.response
# Return serialized data emitter, ct = Emitter.get(em_format) srl = emitter(result, recurse_level, typemapper, handler, handler.fields, anonymous) try: """ Decide whether or not we want a generator here, or we just want to buffer up the entire result before sending it to the client. Won't matter for smaller datasets, but larger will have an impact. """ if self.stream: stream = srl.stream_render(request) else: stream = srl.render(request) resp = HttpResponse(stream, mimetype=ct) resp.streaming = self.stream return resp except HttpStatusCode, e: return e.response @staticmethod def cleanup_request(request): """ Removes `oauth_` keys from various dicts on the request object, and returns the sanitized version. """ for method_type in ('GET', 'PUT', 'POST', 'DELETE'): block = getattr(request, method_type, { }) if True in [ k.startswith("oauth_") for k in block.keys() ]: sanitized = block.copy()
def export(self, events, metadata_names=None, feed_name="events"):
    '''
    export the events with an optional mapping of metadata to column
    headers.

    :param events: a sequence of events, that should stream
    :param metadata_names: a dict containing metadata key to csv column
        mappings. The key is the medatdata key, the value is the csv
        column name
    :param feed_name: base name for the downloaded ``.csv`` attachment.

    returns a http response that uses a generator to stream.
    '''
    def generate():
        csvfile = StringIO.StringIO()
        csvwriter = csv.writer(csvfile)
        # Fixed columns, followed by any mapped metadata columns.
        columns = [
            "id",
            "uid",
            "Title",
            "Location",
            "Start localtime",
            "Start Orign Timezone",
            "End localtime",
            "End Orign Timezone",
        ]
        # If a mapping has been provided, unpack
        if metadata_names is not None:
            for metadata_name, csvname in metadata_names.iteritems():
                columns.append(csvname)
        csvwriter.writerow(columns)
        yield csvfile.getvalue()
        for e in events:
            # Fresh buffer per row so each yield is exactly one CSV line.
            csvfile = StringIO.StringIO()
            csvwriter = csv.writer(csvfile)
            columns = [
                e.id,
                e.uid,
                e.title,
                e.location,
                DateConverter.from_datetime(
                    e.start_local(), e.metadata.get("x-allday")).isoformat(),
                e.starttz,
                # BUG FIX: the "End localtime" column previously repeated
                # e.start_local(); it must use the event's end time.
                DateConverter.from_datetime(
                    e.end_local(), e.metadata.get("x-allday")).isoformat(),
                e.endtz
            ]
            # If a mapping has been provided, unpack
            if metadata_names is not None:
                metadata = e.metadata
                for metadata_name, icalname in metadata_names.iteritems():
                    if metadata_name in metadata:
                        o = metadata[metadata_name]
                        if isinstance(o, list):
                            # List values are flattened into one cell.
                            columns.append(",".join(o))
                        else:
                            columns.append(o)
                    else:
                        # Keep column alignment for missing metadata.
                        columns.append("")
            csvwriter.writerow(columns)
            yield csvfile.getvalue()

    response = HttpResponse(generate(), content_type="text/csv; charset=utf-8")
    response[
        'Content-Disposition'] = "attachment; filename=%s.csv" % feed_name
    response.streaming = True
    return response
def export(self, events, metadata_names=None, feed_name="events"):
    '''
    Creates a streaming http response of json data representing the
    contents of the events sequence

    :param events: a sequence of events
    :param metadata_names: mapping between the metadata key and the json
        property name. The key is the metadata key, the value is the name
        of the json property.
        If the metadata value is a list, it will be output as multiple
        properties in the json stream.
    :param feed_name: base name for the downloaded ``.json`` attachment.
    '''
    def generate():
        # I am not using the standard simplejoson encoders since I want to stream.
        yield "[\n"
        first = True
        for e in events:
            event = {
                'summary': '%s' % e.title,
                'start': DateConverter.from_datetime(
                    e.start_local(), e.metadata.get("x-allday")).isoformat(),
                'end': DateConverter.from_datetime(
                    e.end_local(), e.metadata.get("x-allday")).isoformat(),
                'start_origin': DateConverter.from_datetime(
                    e.start_origin(), e.metadata.get("x-allday")).isoformat(),
                'end_origin': DateConverter.from_datetime(
                    e.end_origin(), e.metadata.get("x-allday")).isoformat(),
                'start_origin_tz': e.starttz,
                'end_origin_tz': e.endtz,
                'allday': e.metadata.get("x-allday"),
                'location': e.location,
                'uid': e.uid
            }
            # If a mapping has been provided, unpack.  The core event keys
            # above are protected from being overwritten by metadata.
            metadata = e.metadata
            protected = frozenset(event.keys())
            if metadata_names is not None:
                for metadata_name, jsonname in metadata_names.iteritems():
                    if jsonname not in protected and metadata_name in metadata:
                        event[jsonname] = metadata[metadata_name]
            else:
                # No mapping: pass through all non-conflicting metadata keys.
                for k, v in metadata.iteritems():
                    if k not in protected:
                        event[k] = v
            # Comma separators go before every element except the first,
            # so the stream is valid JSON when it completes.
            if first:
                yield "%s" % json.dumps(event, indent=JSON_INDENT)
                first = False
            else:
                yield ",\n%s" % json.dumps(event, indent=JSON_INDENT)
        yield "\n]\n"

    response = HttpResponse(generate(), content_type=JSON_CONTENT_TYPE)
    response[
        'Content-Disposition'] = "attachment; filename=%s.json" % feed_name
    response.streaming = True
    return response
def export_as_csv(modeladmin, request, queryset):
    """
    export a queryset to csv file

    Two-phase admin action: on a plain request it renders an options form
    (column selection, formats, csv dialect); on 'apply' it streams the
    selected columns of the queryset as a CSV attachment.
    """
    #if not request.user.has_perm('adminactions.export'):
    #    messages.error(request, _('Sorry you do not have rights to execute this action'))
    #    return
    # Give listeners a chance to veto the action before any work happens.
    try:
        adminaction_requested.send(sender=modeladmin.model,
                                   action='export_as_csv',
                                   request=request,
                                   queryset=queryset)
    except ActionInterrupted as e:
        messages.error(request, str(e))
        return
    cols = [(f.name, f.verbose_name) for f in queryset.model._meta.fields]
    initial = {'_selected_action': request.POST.getlist(helpers.ACTION_CHECKBOX_NAME),
               'select_across': request.POST.get('select_across') == '1',
               'action': request.POST.get('action'),
               'date_format': 'd/m/Y',
               'datetime_format': 'N j, Y, P',
               'time_format': 'P',
               'quotechar': '"',
               'columns': [x for x, v in cols],
               'quoting': csv.QUOTE_ALL,
               'delimiter': ',',
               'escapechar': '\\', }
    if 'apply' in request.POST:
        form = CSVOptions(request.POST)
        form.fields['columns'].choices = cols
        if form.is_valid():
            try:
                adminaction_start.send(sender=modeladmin.model,
                                       action='export_as_csv',
                                       request=request,
                                       queryset=queryset,
                                       form=form)
            except ActionInterrupted as e:
                messages.error(request, str(e))
                # return
            if hasattr(modeladmin, 'get_export_as_csv_filename'):
                filename = modeladmin.get_export_as_csv_filename(request, queryset)
            else:
                filename = "%s.csv" % queryset.model._meta.verbose_name_plural.lower().replace(" ", "_")
            # Build and export the csv file
            csvfile = StringIO.StringIO()
            writer = csv.writer(csvfile,
                                escapechar=str(form.cleaned_data['escapechar']),
                                delimiter=str(form.cleaned_data['delimiter']),
                                quotechar=str(form.cleaned_data['quotechar']),
                                quoting=int(form.cleaned_data['quoting']))
            if form.cleaned_data.get('header', False):
                writer.writerow([f for f in form.cleaned_data['columns']])

            def read_and_flush():
                # Return everything buffered so far and reset the buffer.
                csvfile.seek(0)
                data = csvfile.read()
                csvfile.seek(0)
                csvfile.truncate()
                return data

            def data():
                # One CSV row per object; values are formatted per the
                # form's date/time settings before being written.
                for obj in queryset_iterator(queryset):
                    row = []
                    for fieldname in form.cleaned_data['columns']:
                        value = get_field_value(obj, fieldname)
                        if isinstance(value, datetime.datetime):
                            value = dateformat.format(value, form.cleaned_data['datetime_format'])
                        elif isinstance(value, datetime.date):
                            value = dateformat.format(value, form.cleaned_data['date_format'])
                        elif isinstance(value, datetime.time):
                            value = dateformat.format(value, form.cleaned_data['time_format'])
                        row.append(smart_str(value))
                    writer.writerow(row)
                    data = read_and_flush()
                    #import time
                    #time.sleep(1)
                    yield data
            try:
                response = HttpResponse(data(), mimetype='text/csv')
                response['Content-Disposition'] = 'attachment;filename="%s"' % filename.encode('us-ascii', 'replace')
                response.streaming = True
            except Exception as e:
                messages.error(request, "Error: (%s)" % str(e))
            else:
                adminaction_end.send(sender=modeladmin.model,
                                     action='export_as_csv',
                                     request=request,
                                     queryset=queryset)
                return response
    else:
        form = CSVOptions(initial=initial)
        form.fields['columns'].choices = cols
    # Reached on a first (non-apply) request, on an invalid form, or after
    # an export error: render the options form.
    adminForm = helpers.AdminForm(form, modeladmin.get_fieldsets(request), {}, [], model_admin=modeladmin)
    media = modeladmin.media + adminForm.media
    tpl = 'adminactions/export_csv.html'
    ctx = {'adminform': adminForm,
           'change': True,
           'title': _('Export to CSV'),
           'is_popup': False,
           'save_as': False,
           'has_delete_permission': False,
           'has_add_permission': False,
           'has_change_permission': True,
           'queryset': queryset,
           'opts': queryset.model._meta,
           'app_label': queryset.model._meta.app_label,
           'media': mark_safe(media)}
    return render_to_response(tpl, RequestContext(request, ctx))
def _get_response(self, request, output):
    """Handles various output types from HTTP method calls.

    1. HttpResponse instance - assume content has already been encoded
    2. HttpStatusCode instance - return a naked response without any
       content
    3. test for a two-item tuple containing an HttpStatusCode instance and
       the content - the status code defines the response type and the
       second argument defines an optional entity-body which will be
       encoded based on the 'Accept' header
    4. any other object - will use a standard 200 status code - this
       object will be encoded based on the 'Accept' header
    """
    status = None
    content = None
    streaming = False

    # response already defined, so just return
    if isinstance(output, HttpResponse):
        return output

    # the status is returned, but with no content
    if isinstance(output, http.HttpStatusCode):
        status = output.status_code
    # see if the output is a status/content pair, otherwise
    # assume the output is strictly content
    elif output and type(output) in (list, tuple):
        if isinstance(output[0], http.HttpStatusCode):
            status = output[0].status_code
            content = output[1]
        else:
            content = output
    # none of the basic parsing passed, therefore the output is
    # the content
    else:
        content = output

    # test for a stream-type object i.e. a generator. if true, then
    # do not process the content any further since it would be consumed
    if inspect.isgenerator(content):
        streaming = True

    # if there is content then handle it appropriately
    if content is not None:
        if hasattr(request, 'accepttype'):
            # if marked as streaming, this accepttype is the assumed output
            accepttype = request.accepttype
            if not streaming:
                # attempt to resolve and encode the content based on the
                # accepttype
                content = self.resolve_fields(content)
                content = representation.encode(accepttype, content)
            response = HttpResponse(content, status=status,
                                    mimetype=accepttype)
        else:
            response = HttpResponse(content, status=status)
    else:
        response = HttpResponse(status=status)

    # Flag generator bodies so middleware treats the response as streamed.
    if streaming:
        response.streaming = True

    return response
def __call__(self, request, *args, **kwargs):
    """Dispatch *request* to the authenticated handler method and emit
    the serialized result.
    """
    rm = request.method.upper()

    # Django doesn't populate request.PUT itself, so coerce it from POST.
    if rm == "PUT":
        coerce_put_post(request)

    actor, anonymous = self.authenticate(request, rm)

    if anonymous is CHALLENGE:
        # Authentication demanded a challenge response; actor builds it.
        return actor()
    else:
        handler = actor

    # Deserialize request bodies for data-bearing methods into request.data.
    if rm in ('POST', 'PUT'):
        try:
            translate_mime(request)
        except MimerDataException:
            return rc.BAD_REQUEST
        if not hasattr(request, 'data'):
            if rm == 'POST':
                request.data = request.POST
            else:
                request.data = request.PUT

    if not rm in handler.allowed_methods:
        return HttpResponseNotAllowed(handler.allowed_methods)

    meth = getattr(handler, self.callmap.get(rm, ''), None)
    if not meth:
        raise Http404

    em_format = self.determine_emitter(request, *args, **kwargs)
    kwargs.pop('emitter_format', None)

    # Strip oauth_* parameters before handing the request to the handler.
    request = self.cleanup_request(request)

    try:
        result = meth(request, *args, **kwargs)
    except ValueError:
        result = rc.BAD_REQUEST
        result.content = 'Invalid arguments'

    try:
        emitter, ct = Emitter.get(em_format)
        fields = handler.fields
        # List-like results may declare their own field set.
        if hasattr(handler, 'list_fields') and isinstance(
                result, (list, tuple, QuerySet)):
            fields = handler.list_fields
    except ValueError:
        result = rc.BAD_REQUEST
        result.content = "Invalid output format specified '%s'." % em_format
        return result

    status_code = 200

    # Unwrap non-string HttpResponse payloads so the emitter can
    # serialize the raw container while preserving the status code.
    if isinstance(result, HttpResponse) and not result._is_string:
        status_code = result.status_code
        result = result._container

    srl = emitter(result, typemapper, handler, fields, anonymous)

    try:
        # Stream the serialization or buffer it whole, per configuration.
        if self.stream:
            stream = srl.stream_render(request)
        else:
            stream = srl.render(request)

        if not isinstance(stream, HttpResponse):
            resp = HttpResponse(stream, mimetype=ct, status=status_code)
        else:
            resp = stream

        resp.streaming = self.stream

        return resp
    except HttpStatusCode, e:
        return e.response
try: """ Decide whether or not we want a generator here, or we just want to buffer up the entire result before sending it to the client. Won't matter for smaller datasets, but larger will have an impact. """ if self.stream: stream = srl.stream_render(request) else: stream = srl.render(request) if not isinstance(stream, HttpResponse): resp = HttpResponse(stream, mimetype=ct, status=status_code) else: resp = stream resp.streaming = self.stream if content_range: resp.status_code = 206 resp['Content-Range'] = content_range return resp except HttpStatusCode, e: return e.response @staticmethod def cleanup_request(request): """ Removes `oauth_` keys from various dicts on the request object, and returns the sanitized version. """
def empty_response():
    """Return an empty ``HttpResponse`` flagged as streaming.

    Marking the response as streaming stops ConditionalGetMiddleware
    from adding a 'Content-Length: 0' header to it.
    """
    response = HttpResponse('')
    response.streaming = True
    return response
def __call__(self, request, *args, **kwargs):
    """
    NB: Sends a `Vary` header so we don't cache requests
    that are different (OAuth stuff in `Authorization` header.)

    Dispatches the request: authenticates, normalizes the payload into
    ``request.data``, calls the handler method for the HTTP verb, and
    serializes the (status_code, result) pair via the negotiated emitter.
    Raises ``Http404`` when no method is mapped for the verb.
    """
    rm = request.method.upper()
    # Django's internal mechanism doesn't pick up
    # PUT request, so we trick it a little here.
    if rm == "PUT":
        coerce_put_post(request)
    actor, anonymous, error = self.authenticate(request, rm)
    if anonymous is CHALLENGE:
        # Authentication failed: `actor` builds the challenge response.
        return actor(error)
    else:
        handler = actor
    # Translate nested datastructs into `request.data` here.
    if rm in ("POST", "PUT"):
        try:
            translate_mime(request)
        except MimerDataException:
            return rc.BAD_REQUEST
        # Fall back to Django's parsed form data when no mime handler ran.
        if not hasattr(request, "data"):
            if rm == "POST":
                request.data = request.POST
            else:
                request.data = request.PUT
    if rm not in handler.allowed_methods:
        return HttpResponseNotAllowed(handler.allowed_methods)
    meth = handler.method_handlers.get(rm, None)
    if not meth:
        raise Http404
    # Support emitter through (?P<emitter_format>) and ?format=emitter
    # and lastly Accept: header processing
    em_format = self.determine_emitter(request, *args, **kwargs)
    if not em_format:  # pragma: nocover
        # TODO: This should be fixed. The current implementation of
        # determine_emitter defaults to json. The only way to reach this
        # block is to append ?format= in your URL. If the code reaches
        # this block, it will fail because neither self.strict_accept or
        # self.default_emitter exist. It would be fixed by implementing a
        # child resource class, but that shouldn't be required.
        request_has_accept = "HTTP_ACCEPT" in request.META
        if request_has_accept and self.strict_accept:
            return rc.NOT_ACCEPTABLE
        em_format = self.default_emitter
    kwargs.pop("emitter_format", None)
    # Clean up the request object a bit, since we might very well have
    # `oauth_`-headers in there, and we don't want to pass these along to
    # the handler; then run the content-type-specific deserializer over it.
    request = _DESERIALIZERS[request.content_type](
        self.cleanup_request(request))
    try:
        # NOTE(review): `data` is assigned but never used below — looks
        # like leftover debugging; confirm before removing.
        data = request.POST if request.method == "POST" else request.GET
        # Handlers return an explicit (status_code, result) pair here.
        status_code, result = meth(request, *args, **kwargs)
    except Exception as e:
        # Any handler failure is funneled through the resource-level
        # error handler and reported as a 400.
        status_code = http.client.BAD_REQUEST
        result = self.error_handler(e, request, meth, em_format)
    try:
        emitter, ct = Emitter.get(em_format)
    except ValueError:  # pragma: nocover
        result = rc.BAD_REQUEST
        result.content = "Invalid output format specified '%s'." % em_format
        return result
    # If we're looking at a response object which contains non-string
    # content, then assume we should use the emitter to format that
    # content
    if self._use_emitter(result):  # pragma: nocover
        status_code = result.status_code
        # Note: We can't use result.content here because that
        # method attempts to convert the content into a string
        # which we don't want. when
        # _is_string/_base_content_is_iter is False _container is
        # the raw data
        result = result._container
    srl = emitter(result, handler, anonymous)
    try:
        """
        Decide whether or not we want a generator here,
        or we just want to buffer up the entire result
        before sending it to the client. Won't matter for
        smaller datasets, but larger will have an impact.
        """
        if self.stream:
            stream = srl.stream_render(request)
        else:
            stream = srl.render(request)
        if not isinstance(stream, HttpResponse):
            resp = HttpResponse(stream, content_type=ct, status=status_code)
        else:
            resp = stream
        resp.streaming = self.stream
        return resp
    except HttpStatusCode as e:
        # Emitters may abort with an explicit HTTP status; relay it.
        return e.response