def pdf(request):
    """Render remote user data as a PDF, persist a copy, and return it.

    Fetches an auth token and user data from the boarding API, renders the
    ``pdfkit.html`` template, converts it with pdfkit, stores the PDF on a
    ``CustomerPdf`` record, and returns it as a download response.
    """
    person = Customer.objects.get(id=1)  # NOTE(review): hard-coded customer id — confirm intent
    template = get_template('pdfkit.html')
    # SECURITY(review): credentials are hard-coded; move to settings/env vars.
    obtain_token = requests.post(
        'https://boardingbay-back.paywithconnect.com/api/v1/auth/mobileUserToken/',
        json={"employee_id": "admin", "password": "******"})
    headers = {'Authorization': "Circle " + obtain_token.json().get('data').get('token')}
    resp = requests.get(
        'https://boardingbay-back.paywithconnect.com/api/v1/boarding/admin/users/17/',
        headers=headers)
    data = resp.json().get('data', None)
    html = template.render({'data': data})
    options = {
        'page-size': 'Letter',
        'encoding': "UTF-8",
    }
    pdf = pdfkit.from_string(html, False, options)
    response = HttpResponse(pdf, content_type='application/pdf')
    # BUG FIX: the header was assigned twice ('attachment', then a bare
    # 'filename=...'), so the second assignment discarded 'attachment'.
    # Both belong in one valid header value.
    response['Content-Disposition'] = 'attachment; filename="report.pdf"'
    entity = CustomerPdf()
    entity.save()
    entity.pdf_file.save('invoice.pdf', ContentFile(response.getvalue()), save=True)
    import base64
    bb = base64.b64encode(response.getvalue())
    inv_path = sendfile(bb.decode('utf-8'))
    return response
def finish(pdf, order, shipping):
    """Wrap the rendered PDF in a response buffer and e-mail the invoice.

    Sends two mails over one SMTP connection — the internal "new order"
    notice and the buyer's copy — each with the PDF invoice attached.
    The HttpResponse is used only as a byte buffer here; returns None.
    """
    response = HttpResponse(content_type='application/pdf')
    response['Content-Disposition'] = 'attachment; filename="pdfkit_out.pdf"'
    response.write(pdf)
    print("pdf invoice rendered")  # FIX: Python 3 print() (was a py2 print statement)
    try:
        connection = mail.get_connection()
    except SMTPException as e:
        logger.error("Failed to open connection to mail server",
                     exc_info=True, extra={'request': None, 'error': e})
        return
    sender_address = settings.DEFAULT_FROM_EMAIL
    mail_list = []
    # hotpot new invoice email
    hotpot_context = {'order': order, 'order_items': order.orderitem_set.all()}
    if shipping != 0:
        hotpot_context['shipping'] = Decimal(shipping).quantize(Decimal('0.01'))
    hotpot_text_message = render_to_string('email/hotpot_new_order.txt', hotpot_context)
    hotpot_mail = mail.EmailMessage('[HOTPOT] Neue Bestellung ist eingegangen',
                                    hotpot_text_message, sender_address,
                                    [settings.DEFAULT_FROM_EMAIL])
    hotpot_mail.attach('hotpot_rechnung.pdf', response.getvalue(), 'application/pdf')
    mail_list.append(hotpot_mail)
    # buyer order-confirmation email
    buyer_context = hotpot_context
    buyer_text_message = render_to_string('email/buyer_new_order.txt', buyer_context)
    buyer_mail = mail.EmailMessage('Ihre HOTPOT Bestellung', buyer_text_message,
                                   sender_address, [order.email])
    buyer_mail.attach('hotpot_rechnung.pdf', response.getvalue(), 'application/pdf')
    mail_list.append(buyer_mail)
    try:
        for message in mail_list:
            try:
                message.connection = connection
                message.send()
                print("invoice email sent")  # FIX: Python 3 print()
            except SMTPException as e:
                logger.error("Sending mail to %s failed" % (str(message.to)),
                             exc_info=True,
                             extra={'request': None, 'mailmessage': message, 'error': e})
    finally:
        # FIX: always release the SMTP connection, even if a send raises
        # something other than SMTPException.
        connection.close()
def get_response(request):
    """Return information about HttpResponse object.

    Demo view: exercises HttpResponse attributes and methods, collects the
    results into two dicts and renders them in a template. FIX: corrected
    typo'd display labels ('reason_phrese', 'headre', 'expres') and collapsed
    the line-continuation-mangled signature strings.
    """
    a_dict = {}
    m_dict = {}
    context = {}
    response = HttpResponse()
    # Attributes:
    response.content = "some content"
    a_dict["content"] = response.content
    a_dict["charset"] = response.charset
    a_dict["status_code"] = response.status_code
    a_dict["reason_phrase"] = response.reason_phrase
    a_dict["streaming"] = response.streaming
    a_dict["closed"] = response.closed
    # Methods (most mutators return None; the value shown is the call result):
    m_dict["__setitem__(header, value)"] = response.__setitem__("test", "Test")
    m_dict["__getitem__(header)"] = response.__getitem__("test")
    m_dict["__delitem__(header)"] = response.__delitem__("test")
    # NOTE: 'test' was just deleted above, so has_header() reports False here.
    m_dict["has_header(header)"] = response.has_header("test")
    m_dict["setdefault(header, value)"] = response.setdefault("t", "test")
    m_dict["set_cookie(key, value='', max_age=None, expires=None, path='/', domain=None, secure=False, httponly=False, samesite=None)"] = response.set_cookie("some", "foo")
    m_dict["set_signed_cookie(key, value='', max_age=None, expires=None, path='/', domain=None, secure=False, httponly=False, samesite=None)"] = response.set_signed_cookie("foo", "foo")
    m_dict["delete_cookie(key, path='/', domain=None)"] = response.delete_cookie("foo")
    m_dict["close()"] = response.close()
    m_dict["write(content)"] = response.write("<p>CONTENT</p>")
    m_dict["flush()"] = response.flush()
    m_dict["tell()"] = response.tell()
    m_dict["getvalue()"] = response.getvalue()
    m_dict["readable()"] = response.readable()
    m_dict["seekable()"] = response.seekable()
    m_dict["writable()"] = response.writable()
    m_dict["writelines(lines)"] = response.writelines([" one", " two", " three"])
    m_dict["lines"] = response.getvalue()
    context["a_dict"] = a_dict
    context["m_dict"] = m_dict
    return render(request, "response_object/response.html", context)
def update(request, result, wtype=0):
    """Handle Carouse admin operations posted as ``opType``.

    'enable' toggles the item state; 'update' returns the rendered edit form.
    Results are written into the caller-supplied ``result`` dict.
    """
    cid = request.POST.get("cid", None)
    if cid:
        try:
            c = models.Carouse.objects.get(id=int(cid))
            opType = request.POST.get("opType", "")
            if opType == "enable":
                isEnable = base_util.getPostAsBool(request, "isEnable")
                state = Status.Close.value
                if isEnable:
                    state = Status.Open.value
                # Only touch the row when the state actually changes.
                if c.state != state:
                    c.update_time = timezone.now()
                    c.state = state
                    c.save()
                result["isSuccess"] = True
            elif opType == "update":
                form = CarouseForm(instance=c)
                result["id"] = c.id
                result["sortId"] = c.sort_id
                # FIX: render the form markup directly instead of round-tripping
                # it through an HttpResponse just to encode and decode it.
                result["html"] = str(form.as_p())
                result["isSuccess"] = True
        except Exception as e:
            # Best-effort: log and leave result['isSuccess'] unset.
            # (Removed a redundant 'pass' after the log call.)
            _GG("Log").w(e)
def get_searchresults_csv(request):
    """Export the user's selected search results as CSV and e-mail a copy.

    Expects a comma-separated ``ids`` GET parameter. Returns the CSV as the
    HTTP response after mailing it to the requesting user, or a plain
    message when no ids were supplied.
    """
    ids = request.GET.get('ids', '')
    if not ids:  # idiom: empty string is falsy — no need for len() > 0
        return HttpResponse('No data available to export')
    searchresults_list = SearchResults.get_search_results_by_ids_for_user_id(
        request.user.id, ids)
    response = HttpResponse(content_type='text/csv')
    response['Content-Disposition'] = 'attachment; filename="searchresults.csv"'
    writer = csv.writer(response)
    writer.writerow([
        'ID', 'Title', 'Link to Content', 'Social Media Type', 'Date Added'
    ])
    for searchresult in searchresults_list:
        writer.writerow([
            searchresult['id'],
            searchresult['title'],
            searchresult['link_to_content'],
            searchresult['social_media_type'],
            searchresult['date_added'],
        ])
    email = EmailMessage('Search Results CSV', 'Search results attached',
                         '*****@*****.**', [request.user.email])
    email.attach('searchresults.csv', response.getvalue(), 'text/csv')
    email.send()
    return response
def bank_report(request, department):
    """Render the bank-details report for all staff of *department* as a PDF.

    :param department: primary key of the Department to report on.
    """
    department = Department.objects.get(id=department)
    staff_list = Staff.objects.filter(department=department.id)
    ct = {
        'staff_list': staff_list,
        'department': department,
    }
    template = get_template("wisapp/report/bank_details.html")
    html = template.render(ct)
    css_string = """@page { size: a4 portrait; margin: 1mm; counter-increment: page; }"""
    pdf_file = HTML(string=html, base_url=request.build_absolute_uri()).write_pdf(
        stylesheets=[CSS(string=css_string)], presentational_hints=True)
    response = HttpResponse(pdf_file, content_type='application/pdf')
    response['Content-Disposition'] = ('filename="BANK DETAILS FOR STAFF OF '
                                       + department.name + ' DEPARTMENT.pdf"')
    # FIX: removed an unreachable second 'return HttpResponse(...)' that
    # followed this return in the original.
    return response
def email_contract(self):
    """Render this TA contract as a PDF and e-mail it to the applicant.

    Uses the unit's configured e-mail text when present, otherwise the
    module defaults. Sends from the posting contact or the site default.
    """
    unit = self.posting.unit
    try:
        contract_email = unit.contract_email_text
        content = contract_email.content
        subject = contract_email.subject
    except TAContractEmailText.DoesNotExist:
        content = DEFAULT_EMAIL_TEXT
        subject = DEFAULT_EMAIL_SUBJECT
    # HttpResponse is used here only as a writable buffer for the PDF form.
    response = HttpResponse(content_type="application/pdf")
    response['Content-Disposition'] = 'inline; filename="%s-%s.pdf"' % (
        self.posting.slug, self.application.person.userid)
    ta_form(self, response)
    to_email = self.application.person.email()
    if self.posting.contact():
        from_email = self.posting.contact().email()
    else:
        from_email = settings.DEFAULT_FROM_EMAIL
    msg = EmailMultiAlternatives(subject, content, from_email, [to_email],
                                 headers={'X-coursys-topic': 'ta'})
    # BUG FIX: the attachment filename had a stray leading double-quote
    # ('"%s-%s.pdf'), producing names like '"slug-userid.pdf'.
    msg.attach('%s-%s.pdf' % (self.posting.slug, self.application.person.userid),
               response.getvalue(), 'application/pdf')
    msg.send()
def generate_report(request):
    """Render the statistics report for a GET-supplied date range as a PDF.

    Returns a plain message when either 'start' or 'end' is missing.
    """
    start = request.GET.get('start')
    end = request.GET.get('end')
    if not all((start, end)):
        return HttpResponse('Please choose a date range')
    data = get_table_graph_data(start=start, end=end)
    context = {'data': data, 'start': start, 'end': end}
    template = get_template('reports/statistics_report.html')
    html = template.render(context)
    css_string = """@page { size: a4 portrait; margin: 1mm; counter-increment: page; }"""
    pdf_file = HTML(string=html, base_url=request.build_absolute_uri()).write_pdf(
        stylesheets=[CSS(string=css_string)], presentational_hints=True)
    response = HttpResponse(pdf_file, content_type='application/pdf')
    response['Content-Disposition'] = 'filename="Report.pdf"'
    # FIX: removed an unreachable second return that followed this one,
    # plus debug print() calls.
    return response
def transform_to_pdf(response, pdfname, return_stringIO=False): """ call xhtml2pdf.pisa to convert html responce to pdf """ # response['mimetype'] = 'application/pdf' # TODO : on the fly filename from url # response['Content-Disposition'] = 'attachment; filename=%s.pdf' % pdfname content = response.content if not return_stringIO: new_response = HttpResponse(content="", mimetype="application/pdf") new_response["Content-Disposition"] = "attachment; filename=%s.pdf" % pdfname else: new_response = StringIO.StringIO() pdf = pisa.pisaDocument(StringIO.StringIO(content), new_response, link_callback=fetch_resources) if not pdf.err: if return_stringIO: pdf = new_response.getvalue() new_response.close() return pdf else: return new_response else: # TODO return error and redirect to default view return HttpResponse( "We had some errors in pdfMiddleWare : \ <br/><pre>%s</pre>" % pdf )
def test_stream_interface(self):
    """HttpResponse behaves like a writable byte stream."""
    # getvalue() exposes the body passed to the constructor, as bytes.
    seeded = HttpResponse('asdf')
    self.assertEqual(seeded.getvalue(), b'asdf')
    # A fresh response is writable and accumulates writelines() output.
    blank = HttpResponse()
    self.assertIs(blank.writable(), True)
    lines = ['foo\n', 'bar\n', 'baz\n']
    blank.writelines(lines)
    self.assertEqual(blank.content, b'foo\nbar\nbaz\n')
def test_stream_interface(self):
    """Exercise the stream protocol on HttpResponse."""
    response = HttpResponse("asdf")
    # The constructor body round-trips through getvalue() as bytes.
    self.assertEqual(response.getvalue(), b"asdf")
    response = HttpResponse()
    self.assertEqual(response.writable(), True)
    # writelines() appends each chunk to the body in order.
    for expected_tail in [None]:
        response.writelines(["foo\n", "bar\n", "baz\n"])
    self.assertEqual(response.content, b"foo\nbar\nbaz\n")
def test_stream_interface(self):
    """HttpResponse supports getvalue/writable/writelines like a stream."""
    prefilled = HttpResponse('asdf')
    self.assertEqual(prefilled.getvalue(), b'asdf')
    empty = HttpResponse()
    self.assertEqual(empty.writable(), True)
    empty.writelines(['foo\n', 'bar\n', 'baz\n'])
    # The written lines concatenate into the response body.
    self.assertEqual(empty.content, b'foo\nbar\nbaz\n')
def test_stream_interface(self):
    """Verify the file-like write interface of HttpResponse."""
    # Body supplied at construction time is readable back via getvalue().
    r1 = HttpResponse("asdf")
    self.assertEqual(r1.getvalue(), b"asdf")
    # An empty response reports itself writable (identity check on True).
    r2 = HttpResponse()
    self.assertIs(r2.writable(), True)
    chunks = ["foo\n", "bar\n", "baz\n"]
    r2.writelines(chunks)
    self.assertEqual(r2.content, b"foo\nbar\nbaz\n")
def return_dict_example(apic_em_ip):
    """Fetch an APIC-EM service ticket and (currently stubbed) device config.

    :param apic_em_ip: address of the APIC-EM controller.
    :return: the configuration string (currently a placeholder).
    """
    auth_token = get_token(apic_em_ip)
    # FIX: decode the token body directly. The original abused the unbound
    # Django method HttpResponse.getvalue(auth_token) on a non-Django
    # response object (it only worked because both classes expose .content).
    auth_token = json.loads(auth_token.content.decode('utf-8'))
    auth_token = auth_token['response']['serviceTicket']
    device_id = get_device_id(auth_token, apic_em_ip)
    #config = get_config(auth_token, apic_em_ip, device_id)
    config = 'No configuration available'
    return config
def staff_list_report(request):
    """Render a staff list (all / retired / active) for a department as PDF.

    Retirement is approximated as more than ~30 years of service or age
    over ~60, using 365-day years.
    """
    employment_status = request.GET.get('status')
    dept_id = request.GET.get('department')
    # Cut-offs: employed more than ~30 years ago, or born more than ~60 years ago.
    date_of_retirement_date_of_employment = datetime.datetime.now() - datetime.timedelta(days=30 * 365)
    date_of_birth_date_of_employment = datetime.datetime.now() - datetime.timedelta(days=60 * 365)
    still_working = Staff.objects.filter(
        Q(department=dept_id)
        & Q(date_of_employment__gt=date_of_retirement_date_of_employment)
        & Q(date_of_birth__gt=date_of_birth_date_of_employment))
    # NOTE(review): '&' binds tighter than '|', so the date_of_birth branch
    # below is NOT restricted to this department — confirm whether
    # parentheses were intended before changing the query.
    retired = Staff.objects.filter(
        Q(department=dept_id)
        & Q(date_of_employment__lte=date_of_retirement_date_of_employment)
        | Q(date_of_birth__lte=date_of_birth_date_of_employment))
    if employment_status == "all":
        staff_list = Staff.objects.filter(department=dept_id)
        file_name = "All Staff List " + str(datetime.date.today())
    elif employment_status == "Retired":
        staff_list = retired
        file_name = "All Retired Staff List " + str(datetime.date.today())
    else:
        staff_list = still_working
        file_name = "All Active Staff List " + str(datetime.date.today())
    ct = {
        'staff_list': staff_list,
        'file_name': file_name,
        #'department': department
    }
    template = get_template("wisapp/report/staff_list.html")
    html = template.render(ct)
    css_string = """@page { size: a4 portrait; margin: 1mm; counter-increment: page; }"""
    pdf_file = HTML(string=html, base_url=request.build_absolute_uri()).write_pdf(
        stylesheets=[CSS(string=css_string)], presentational_hints=True)
    response = HttpResponse(pdf_file, content_type='application/pdf')
    response['Content-Disposition'] = 'filename="' + file_name + '.pdf"'
    # FIX: removed the debug print() banners and an unreachable second
    # return that followed this one.
    return response
def render(path: str, params: dict, filename: str):
    """Render the template at *path* with *params* and return it as a PDF.

    On a conversion error, a 400 response is returned instead.
    """
    html_source = get_template(path).render(params)
    pdf_buffer = io.BytesIO()
    result = pisa.pisaDocument(io.BytesIO(html_source.encode("UTF-8")), pdf_buffer)
    # Guard clause: bail out early on conversion failure.
    if result.err:
        return HttpResponse("Error Rendering PDF", status=400)
    response = HttpResponse(pdf_buffer.getvalue(), content_type='application/pdf')
    #response['Content-Disposition'] = 'attachment;filename=%s.pdf' % filename
    return response
def render(path: str, params: dict, filename: str):
    """Render the template at *path* with *params* and return it as a PDF
    attachment named ``<filename>.pdf``.

    Returns a 400 response when pisa fails to convert the HTML.
    """
    template = get_template(path)
    html = template.render(params)
    response = io.BytesIO()
    pdf = pisa.pisaDocument(html.encode("UTF-8"), response)
    if not pdf.err:
        response = HttpResponse(response.getvalue(), content_type="application/pdf")
        # BUG FIX: the f-string contained a literal placeholder
        # ('filename=(unknown).pdf') and never used the 'filename' parameter.
        response['Content-Disposition'] = f'attachment; filename={filename}.pdf'
        return response
    else:
        return HttpResponse("ERROR", status=400)
def req_test(request):
    """Debug view: dump request attributes into an HTML page and print
    several response attributes to stdout."""
    output = "<html><title>Welcome to Django.</title><body><p>This is Request Test!</p>"
    mess = request.scheme
    output += 'scheme = ' + mess + '<br>'
    mess = str(request.body)
    output += 'body = ' + mess + '<br>'
    mess = request.path
    output += 'path = ' + mess + '<br>'
    mess = request.path_info
    output += 'path_info = ' + mess + '<br>'
    if request.method == 'GET':
        mess = 'Method = ' + 'GET' + '<br>'
    elif request.method == 'POST':
        mess = 'Method = ' + 'POST' + '<br>'
    output += mess
    # request.encoding = 'utf-8'
    mess = settings.DEFAULT_CHARSET
    output += 'DEFAULT_CHARSET = ' + mess + '<br>'
    mess = str(request.encoding)
    output += 'encoding = ' + mess + '<br>'
    mess = request.META['HTTP_ACCEPT_ENCODING']
    output += 'HTTP_ACCEPT_ENCODING = ' + mess + '<br>'
    if request.user.is_authenticated():  # NOTE(review): callable form — Django < 1.10
        mess = 'Hi User'
    else:
        mess = 'Hi Anonimouse!'
    # FIX: this line was corrupted by credential scrubbing ("'User = '******'");
    # reconstructed as the obvious concatenation of the greeting above — confirm.
    output += 'User = ' + mess + '<br>'
    output += 'Host = ' + request.get_host() + '<br>'
    output += 'Path = ' + request.get_full_path() + '<br>'
    output += "</body></html>"
    response = HttpResponse(output)
    # response = HttpResponse(output, content_type='application/vnd.ms-excel')
    # response['Content-Disposition'] = 'attachment; filename="foo.xls"'
    response['Age'] = 120  # setting a response header
    print(response.charset)
    print(response.status_code)  # 200
    print(response.reason_phrase)  # OK
    print(response.content)
    print(response.getvalue())
    return response
def SendingMailToCompany(request, company_slug, job_slug):
    """E-mail the company a spreadsheet of shortlisted applicants for a job,
    then flag those applications as mailed."""
    company = get_object_or_404(Company, slug=company_slug)
    job = get_object_or_404(JobListing, slug=job_slug)
    applications = JobApplications.objects.filter(
        job=job, action_by_team_leader="shortlisted")
    from_email = settings.EMAIL_HOST_USER
    to = '*****@*****.**'
    site_name = "AnalyticsVidhya"
    domain = request.META['HTTP_HOST']
    subject_template_name = 'company/mail_to_company.txt'
    email_template_name = 'company/shortlisted_form.html'
    subject_content = get_template(subject_template_name)
    email_content = get_template(email_template_name)
    context = RequestContext(request, locals())
    sub_content = subject_content.render(context)
    mail_content = email_content.render(context)
    html_message = mail_content
    # Only applicants not yet mailed to the company.
    application_id_list = [
        application.jobseeker.id for application in applications
        if not application.mail_to_company
    ]
    jobseeker_queryset = JobSeeker.objects.filter(id__in=application_id_list)
    queryset = jobseeker_queryset
    columns = ('id', 'name', 'work_exp', 'ug_course', 'ug_institute_name',
               'pg_course', 'pg_institute_name', 'ctc', 'current_employer',
               'current_designation', 'current_location')
    workbook = queryset_to_workbook(queryset, columns)
    response = HttpResponse(content_type='application/vnd.ms-excel')
    # BUG FIX: the filename quote was unterminated in the original header value.
    response['Content-Disposition'] = 'attachment; filename="shortlisted_candidate.xls"'
    workbook.save(response)
    message = EmailMessage(subject='ritu', body=html_message,
                           from_email=from_email, to=[to])
    # NOTE(review): the attachment is named .xls but declared with the .xlsx
    # (openxml) MIME type — confirm which format queryset_to_workbook emits.
    message.attach(
        'shortlisted_candidate.xls', response.getvalue(),
        "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet")
    message.send()
    for application in applications:
        application.mail_to_company = True
        application.save()
    return HttpResponse('mail successfully sent')
def index(request):
    """Dashboard view: fetch an APIC-EM ticket, device id and configuration
    and render them into the index template."""
    template = loader.get_template('apic/index.html')
    auth_token = get_token(apic_em_ip)
    # FIX: decode the token body directly. The original abused the unbound
    # Django method HttpResponse.getvalue(auth_token) on a non-Django
    # response (it only worked because both classes expose .content).
    auth_token = json.loads(auth_token.content.decode('utf-8'))
    auth_token = auth_token['response']['serviceTicket']
    device_id = get_device_id(auth_token, apic_em_ip)
    config = get_config(auth_token, apic_em_ip, device_id)
    #output = config['response'].split('\n')
    context = {
        #'output': output,
        'ticket': auth_token,
        'deviceID': device_id,
    }
    return HttpResponse(template.render(context, request))
def practice(request):
    """Fetch five cat facts from the catfacts service and return the first
    one as a plain HTTP response."""
    requests.packages.urllib3.disable_warnings()
    api_call = "/facts"
    url = catfacts_ip + api_call
    # Response is a requests.models.Response.
    my_response = requests.get(url, params='number=5', verify=False)
    # FIX: parse the body directly. The original abused the unbound Django
    # method HttpResponse.getvalue(my_response) on a requests response, and
    # declared an unused 'header' variable (removed).
    data = json.loads(my_response.content.decode('utf-8'))
    # Pick the first fact out of the returned list.
    response = data['facts'][0]
    return HttpResponse(response)
def hello(request): response = HttpResponse() # response 常见方法 response.write("Welcome to Django1!") response.write("Welcome to Django2!") response.writelines( ["Welcome to Django3!", "Welcome to Django4!", "Welcome to Django5!"]) print(response.getvalue()) print(response.writable()) response.__setitem__("age", "30") print(response.has_header("age")) print(response.__getitem__("age")) print(response.get("age")) response.__delitem__("age") response['address'] = "北京" print(response.get("address")) return response
def req_test(request):
    """Debug view (duplicate variant): dump request attributes into an HTML
    page and print several response attributes to stdout."""
    output = "<html><title>Welcome to Django.</title><body><p>This is Request Test!</p>"
    mess = request.scheme
    output += 'scheme = ' + mess + '<br>'
    mess = str(request.body)
    output += 'body = ' + mess + '<br>'
    mess = request.path
    output += 'path = ' + mess + '<br>'
    mess = request.path_info
    output += 'path_info = ' + mess + '<br>'
    if request.method == 'GET':
        mess = 'Method = ' + 'GET' + '<br>'
    elif request.method == 'POST':
        mess = 'Method = ' + 'POST' + '<br>'
    output += mess
    # request.encoding = 'utf-8'
    mess = settings.DEFAULT_CHARSET
    output += 'DEFAULT_CHARSET = ' + mess + '<br>'
    mess = str(request.encoding)
    output += 'encoding = ' + mess + '<br>'
    mess = request.META['HTTP_ACCEPT_ENCODING']
    output += 'HTTP_ACCEPT_ENCODING = ' + mess + '<br>'
    if request.user.is_authenticated():  # NOTE(review): callable form — Django < 1.10
        mess = 'Hi User'
    else:
        mess = 'Hi Anonimouse!'
    # FIX: this line was corrupted by credential scrubbing ("'User = '******'");
    # reconstructed as the obvious concatenation of the greeting above — confirm.
    output += 'User = ' + mess + '<br>'
    output += 'Host = ' + request.get_host() + '<br>'
    output += 'Path = ' + request.get_full_path() + '<br>'
    output += "</body></html>"
    response = HttpResponse(output)
    # response = HttpResponse(output, content_type='application/vnd.ms-excel')
    # response['Content-Disposition'] = 'attachment; filename="foo.xls"'
    response['Age'] = 120  # setting a response header
    print(response.charset)
    print(response.status_code)  # 200
    print(response.reason_phrase)  # OK
    print(response.content)
    print(response.getvalue())
    return response
def index_calculate_prediction(request, context):
    """Safely compute a prediction and store it in the session.

    :param request: HttpRequest with a session and uploaded FILES.
    :param context: dict; 'incorrect_data' is set on failure.
    """
    required_fields = ('input_data', 'input_menu')
    fields_ok = check_content(required_fields, request.FILES, context)
    alg_settings = AlgorithmSettings.objects.get(user=request.user)
    if not fields_ok:
        return
    try:
        # HttpResponse acts as a writable buffer for the prediction output.
        buffer = HttpResponse()
        make_prediction(request.FILES['input_data'],
                        request.FILES['input_menu'],
                        buffer,
                        alg_settings.model_file)
        request.session['result'] = buffer.getvalue().decode()
    except Exception:
        # Best-effort: any failure just flags the input as incorrect.
        context['incorrect_data'] = True
def colortransfer(request):
    """Apply color transfer from a source image onto a target image.

    Images arrive as uploads or URLs. Returns the result as a JPEG
    response, or as base64-embedded JSON when the 'json' POST flag is set.
    """
    json_request = False
    returndata = {"success": False}
    if request.method == "POST":
        if request.POST.get("json", False):
            json_request = json.loads(request.POST["json"].lower())
        if request.FILES.get("source_image", None) is not None:
            source_image = _grab_image(stream=request.FILES["source_image"])
        else:
            source_url = request.POST.get("source_url", None)
            if source_url is None:
                returndata["error"] = "No URL provided."
                return JsonResponse(returndata)
            source_image = _grab_image(url=source_url)
        if request.FILES.get("target_image", None) is not None:
            target_image = _grab_image(stream=request.FILES["target_image"])
        else:
            target_url = request.POST.get("target_url", None)
            if target_url is None:
                returndata["error"] = "No URL provided."
                return JsonResponse(returndata)
            # BUG FIX: was _grab_image(url=url) — 'url' is undefined here
            # (NameError); the target URL lives in 'target_url'.
            target_image = _grab_image(url=target_url)
        transfer = color_transfer(source_image, target_image,
                                  clip=True, preserve_paper=True)
        transfer = cv2.cvtColor(transfer, cv2.COLOR_BGR2RGB)
        img = Image.fromarray(transfer, 'RGB')
        # HttpResponse doubles as the JPEG output buffer.
        response = HttpResponse(content_type="image/jpeg")
        img.save(response, "JPEG")
        if not json_request:
            return response
        img_str = base64.b64encode(response.getvalue())
        img_base64 = (bytes("data:image/jpeg;base64,", encoding='utf-8')
                      + img_str).decode("utf-8")
        returndata.update({"success": True, "image": img_base64})
    return JsonResponse(returndata)
def form_valid(self, form):
    """Save the ticket form; when 'sigt' is posted, render a PDF boleta,
    upload it to Cloud Storage and e-mail it to the client.

    Redirects to 'core:sended' with sended=True/False depending on mail
    success, or to 'core:circuitosramales' when no PDF was requested.
    """
    self.object = form.save(commit=False)
    self.object.save()
    if 'sigt' in self.request.POST:
        me = pdfcrowd.HtmlToPdfClient('ariaschmario', os.getenv('PDFCROWD_PASSWORD'))
        secundarios = CentroCargaSecundario.objects.filter(
            principal=Ticket.objects.get(superId=self.kwargs['slug']).centro_carga)
        context = {
            'ticket': Ticket.objects.get(superId=self.kwargs['slug']),
            'secundarios': secundarios
        }
        x = render_to_string('pdf.html', context)
        me.setPageSize(u'Letter')
        # HttpResponse is used purely as a byte buffer for the PDF stream.
        response = HttpResponse(content_type='application/pdf')
        me.convertStringToStream(x, response)
        storage_client = storage.Client()
        bucket = storage_client.bucket('elelectricista')
        blob = bucket.blob('boletas/' + self.kwargs['slug'] + '.pdf')
        filePfd = response.getvalue()
        blob.upload_from_string(filePfd, content_type='application/pdf')
        Ticket.objects.get(superId=self.kwargs['slug']).update_file_url(
            'https://storage.cloud.google.com/elelectricista/boletas/'
            + self.kwargs['slug'] + '.pdf')
        try:
            mail_content = "Te adjuntamos la boleta técnica de la visita de El Electricista"
            sender_address = '*****@*****.**'
            sender_pass = os.getenv('GMAIL_PASSWORD_ID')
            receiver_address = Ticket.objects.get(
                superId=self.kwargs['slug']).client.email
            # Setup the MIME envelope.
            message = MIMEMultipart()
            message['From'] = sender_address
            message['To'] = receiver_address
            message['Subject'] = 'El Electricista Boleta'
            message.attach(MIMEText(mail_content, 'plain'))
            # BUG FIX: MIME subtype was misspelled 'octate-stream'.
            payload = MIMEBase('application', 'octet-stream')
            payload.set_payload(filePfd)
            encoders.encode_base64(payload)
            filename = "boleta.pdf"
            payload.add_header('Content-Disposition',
                               'attachment; filename="%s"' % filename)
            message.attach(payload)
            # Create an SMTP session for sending the mail.
            session = smtplib.SMTP('smtp.gmail.com', 587)
            session.starttls()
            session.login(sender_address, sender_pass)
            session.sendmail(sender_address, receiver_address, message.as_string())
            session.quit()
            return redirect("core:sended", slug=self.kwargs['slug'], sended=True)
        except Exception:
            # FIX: narrowed the original bare 'except:'; any failure in
            # composing/sending just reports sended=False.
            return redirect("core:sended", slug=self.kwargs['slug'], sended=False)
    else:
        return redirect("core:circuitosramales", slug=self.kwargs['slug'])
def api_report(
    self,
    request,
    reporttype=None,
    from_date=None,
    to_date=None,
    object_profile=None,
    filter_default=None,
    exclude_zero=True,
    interface_profile=None,
    selector=None,
    administrative_domain=None,
    columns=None,
    description=None,
    o_format=None,
    enable_autowidth=False,
    **kwargs,
):
    """Build the interface-metrics detail report and return it as CSV,
    zipped CSV or XLSX, depending on *o_format*.

    Dates arrive as 'DD.MM.YYYY' strings; when absent, the report covers
    the last 24 hours. *columns* is a comma-separated subset of the
    column names in ``cols`` below.
    """

    def load(mo_ids):
        # Resolve, per managed object, the uplink neighbor -> local
        # interface name, via a Mongo aggregation over noc.links.
        # match = {"links.mo": {"$in": mo_ids}}
        match = {"int.managed_object": {"$in": mo_ids}}
        group = {
            "_id": "$_id",
            "links": {
                "$push": {
                    "iface_n": "$int.name",
                    # "iface_id": "$int._id",
                    # "iface_descr": "$int.description",
                    # "iface_speed": "$int.in_speed",
                    # "dis_method": "$discovery_method",
                    # "last_seen": "$last_seen",
                    "mo": "$int.managed_object",
                    "linked_obj": "$linked_objects",
                }
            },
        }
        value = (
            get_db()["noc.links"]
            .with_options(read_preference=ReadPreference.SECONDARY_PREFERRED)
            .aggregate(
                [
                    {"$unwind": "$interfaces"},
                    {
                        "$lookup": {
                            "from": "noc.interfaces",
                            "localField": "interfaces",
                            "foreignField": "_id",
                            "as": "int",
                        }
                    },
                    {"$match": match},
                    {"$group": group},
                ],
                allowDiskUse=True,
            )
        )
        res = defaultdict(dict)
        for v in value:
            if v["_id"]:
                for vv in v["links"]:
                    # Only plain point-to-point links (exactly two ends).
                    if len(vv["linked_obj"]) == 2:
                        mo = vv["mo"][0]
                        iface = vv["iface_n"]
                        for i in vv["linked_obj"]:
                            if mo != i:
                                res[mo][i] = iface[0]
        return res

    def translate_row(row, cmap):
        # Project a full row down to the selected column indexes.
        return [row[i] for i in cmap]

    def str_to_float(str):
        # NOTE(review): parameter shadows the builtin 'str'; left as-is.
        return float("{0:.3f}".format(float(str)))

    # Full column set; 'columns' selects a subset by name.
    cols = [
        "object_id",
        "object_name",
        "object_address",
        "object_platform",
        "object_adm_domain",
        "object_segment",
        "object_container",
        # "object_hostname",
        # "object_status",
        # "profile_name",
        # "object_profile",
        # "object_vendor",
        "iface_name",
        "iface_description",
        "iface_speed",
        "max_load_in",
        "max_load_in_time",
        "max_load_out",
        "max_load_out_time",
        "avg_load_in",
        "avg_load_out",
        "total_in",
        "total_out",
        "uplink_iface_name",
        "uplink_iface_description",
        "uplink_iface_speed",
        "uplink_max_load_in",
        "uplink_max_load_in_time",
        "uplink_max_load_out",
        "uplink_max_load_out_time",
        "uplink_avg_load_in",
        "uplink_avg_load_out",
        "uplink_total_in",
        "uplink_total_out",
    ]
    header_row = [
        "ID",
        _("OBJECT_NAME"),
        _("OBJECT_ADDRESS"),
        _("OBJECT_PLATFORM"),
        _("OBJECT_ADMDOMAIN"),
        _("OBJECT_SEGMENT"),
        _("CONTAINER_ADDRESS"),
        _("IFACE_NAME"),
        _("IFACE_DESCRIPTION"),
        _("IFACE_SPEED"),
        _("MAX_LOAD_IN, Mbps"),
        _("MAX_LOAD_IN_TIME"),
        _("MAX_LOAD_OUT, Mbps"),
        _("MAX_LOAD_OUT_TIME"),
        _("AVG_LOAD_IN, Mbps"),
        _("AVG_LOAD_OUT, Mbps"),
        _("TOTAL_IN, Mbyte"),
        _("TOTAL_OUT, Mbyte"),
        _("UPLINK_IFACE_NAME"),
        _("UPLINK_IFACE_DESCRIPTION"),
        _("UPLINK_IFACE_SPEED"),
        _("UPLINK_MAX_LOAD_IN, Mbps"),
        _("UPLINK_MAX_TIME_IN"),
        _("UPLINK_MAX_LOAD_OUT, Mbps"),
        _("UPLINK_MAX_TIME_OUT"),
        _("UPLINK_AVG_LOAD_IN, Mbps"),
        _("UPLINK_AVG_LOAD_OUT, Mbps"),
        _("UPLINK_TOTAL_IN, Mbyte"),
        _("UPLINK_TOTAL_OUT, Mbyte"),
    ]
    # Map requested column names to indexes; unknown names are skipped.
    if columns:
        cmap = []
        for c in columns.split(","):
            try:
                cmap += [cols.index(c)]
            except ValueError:
                continue
    else:
        cmap = list(range(len(cols)))
    columns_order = columns.split(",")
    columns_filter = set(columns_order)
    r = [translate_row(header_row, cmap)]
    # Date Time Block
    if not from_date:
        from_date = datetime.datetime.now() - datetime.timedelta(days=1)
    else:
        from_date = datetime.datetime.strptime(from_date, "%d.%m.%Y")
    if not to_date or from_date == to_date:
        to_date = from_date + datetime.timedelta(days=1)
    else:
        to_date = datetime.datetime.strptime(to_date, "%d.%m.%Y") + datetime.timedelta(days=1)
    diff = to_date - from_date
    # Load managed objects, narrowed by the caller's access and filters.
    mos = ManagedObject.objects.filter(is_managed=True)
    if not request.user.is_superuser:
        mos = mos.filter(administrative_domain__in=UserAccess.get_domains(request.user))
    if selector:
        mos = mos.filter(ManagedObjectSelector.objects.get(id=int(selector)).Q)
    if administrative_domain:
        mos = mos.filter(
            administrative_domain__in=AdministrativeDomain.get_nested_ids(
                int(administrative_domain)))
    if object_profile:
        mos = mos.filter(object_profile=object_profile)
    if interface_profile:
        interface_profile = InterfaceProfile.objects.filter(id=interface_profile).first()
    mo_attrs = namedtuple("MOATTRs", [c for c in cols if c.startswith("object")])
    # Container addresses are only resolved when that column was requested.
    containers_address = {}
    if "object_container" in columns_filter:
        containers_address = ReportContainerData(set(mos.values_list("id", flat=True)))
        containers_address = dict(list(containers_address.extract()))
    moss = {}
    for row in mos.values_list("bi_id", "name", "address", "platform",
                               "administrative_domain__name", "segment", "id"):
        moss[row[0]] = mo_attrs(*[
            row[6],
            row[1],
            row[2],
            smart_text(Platform.get_by_id(row[3]) if row[3] else ""),
            row[4],
            smart_text(NetworkSegment.get_by_id(row[5])) if row[5] else "",
            containers_address.get(row[6], "") if containers_address and row[6] else "",
        ])
    report_metric = ReportInterfaceMetrics(tuple(sorted(moss)), from_date, to_date, columns=None)
    # ClickHouse column expressions keyed by (index, field, alias).
    report_metric.SELECT_QUERY_MAP = {
        (0, "managed_object", "id"): "managed_object",
        (1, "path", "iface_name"): "arrayStringConcat(path)",
        (
            2,
            "",
            "iface_description",
        ): "dictGetString('interfaceattributes','description' , (managed_object, arrayStringConcat(path)))",
        (
            3,
            "",
            "profile",
        ): "dictGetString('interfaceattributes', 'profile', (managed_object, arrayStringConcat(path)))",
        (
            4,
            "speed",
            "iface_speed",
        ): "dictGetUInt64('interfaceattributes', 'in_speed', (managed_object, arrayStringConcat(path)))",
        (5, "load_in_max", "load_in_max"): "divide(max(load_in),1048576)",
        (6, "load_out_max", "load_out_max"): "divide(max(load_out),1048576)",
        (7, "max_load_in_time", "max_load_in_time"): "argMax(ts,load_in)",
        (8, "max_load_out_time", "max_load_out_time"): "argMax(ts,load_out)",
        (9, "avg_load_in", "avg_load_in"): "divide(avg(load_in),1048576)",
        (10, "avg_load_out", "avg_load_out"): "divide(avg(load_out),1048576)",
    }
    ifaces_metrics = defaultdict(dict)
    for row in report_metric.do_query():
        avg_in = str_to_float(row[9])
        avg_out = str_to_float(row[10])
        # Totals in Mbyte: Mbps average over the interval, divided by 8.
        total_in = avg_in * diff.total_seconds() / 8
        total_out = avg_out * diff.total_seconds() / 8
        ifaces_metrics[row[0]][row[1]] = {
            "description": row[2],
            "profile": row[3],
            "bandwidth": row[4],
            "max_load_in": str_to_float(row[5]),
            "max_load_out": str_to_float(row[6]),
            "max_load_in_time": row[7],
            "max_load_out_time": row[8],
            "avg_load_in": avg_in,
            "avg_load_out": avg_out,
            "total_in": float("{0:.1f}".format(total_in)),
            "total_out": float("{0:.1f}".format(total_out)),
        }
    # find uplinks — only when any uplink_* column (index > 17) is requested.
    links = {}
    if cmap[-1] > 17:
        mos_id = list(mos.values_list("id", flat=True))
        uplinks = {obj: [] for obj in mos_id}
        for d in ObjectData._get_collection().find(
                {"_id": {"$in": mos_id}}, {"_id": 1, "uplinks": 1}):
            uplinks[d["_id"]] = d.get("uplinks", [])
        rld = load(mos_id)
        for mo in uplinks:
            for uplink in uplinks[mo]:
                if rld[mo]:
                    if mo in links:
                        links[mo] += [rld[mo][uplink]]
                    else:
                        links[mo] = [rld[mo][uplink]]
    for mo_bi in ifaces_metrics:
        mo_id = moss[int(mo_bi)]
        mo_ids = getattr(mo_id, "object_id")
        for i in ifaces_metrics[mo_bi]:
            # NOTE(review): this condition looks inverted — zero-load rows
            # are skipped only when exclude_zero is False. Confirm intent
            # before changing.
            if not exclude_zero:
                if (ifaces_metrics[mo_bi][i]["max_load_in"] == 0
                        and ifaces_metrics[mo_bi][i]["max_load_out"] == 0):
                    continue
            if description:
                if description not in ifaces_metrics[mo_bi][i]["description"]:
                    continue
            if interface_profile:
                if interface_profile.name not in ifaces_metrics[mo_bi][i]["profile"]:
                    continue
            # Indexes 18..28 are the uplink columns, filled below when known.
            row2 = [
                mo_ids,
                getattr(mo_id, "object_name"),
                getattr(mo_id, "object_address"),
                getattr(mo_id, "object_platform"),
                getattr(mo_id, "object_adm_domain"),
                getattr(mo_id, "object_segment"),
                getattr(mo_id, "object_container"),
                i,
                ifaces_metrics[mo_bi][i]["description"],
                ifaces_metrics[mo_bi][i]["bandwidth"],
                ifaces_metrics[mo_bi][i]["max_load_in"],
                ifaces_metrics[mo_bi][i]["max_load_in_time"],
                ifaces_metrics[mo_bi][i]["max_load_out"],
                ifaces_metrics[mo_bi][i]["max_load_out_time"],
                ifaces_metrics[mo_bi][i]["avg_load_in"],
                ifaces_metrics[mo_bi][i]["avg_load_out"],
                ifaces_metrics[mo_bi][i]["total_in"],
                ifaces_metrics[mo_bi][i]["total_out"],
                "",
                "",
                "",
                "",
                "",
                "",
                "",
                "",
                "",
                "",
                "",
            ]
            # ss tracks whether the row was already emitted with uplink data.
            ss = True
            if mo_ids in links:
                for ifname_uplink in links[mo_ids]:
                    if ifname_uplink in ifaces_metrics[mo_bi]:
                        row2[18] = ifname_uplink
                        row2[19] = ifaces_metrics[mo_bi][ifname_uplink]["description"]
                        row2[20] = ifaces_metrics[mo_bi][ifname_uplink]["bandwidth"]
                        row2[21] = ifaces_metrics[mo_bi][ifname_uplink]["max_load_in"]
                        row2[22] = ifaces_metrics[mo_bi][ifname_uplink]["max_load_in_time"]
                        row2[23] = ifaces_metrics[mo_bi][ifname_uplink]["max_load_out"]
                        row2[24] = ifaces_metrics[mo_bi][ifname_uplink]["max_load_out_time"]
                        row2[25] = ifaces_metrics[mo_bi][ifname_uplink]["avg_load_in"]
                        row2[26] = ifaces_metrics[mo_bi][ifname_uplink]["avg_load_out"]
                        row2[27] = ifaces_metrics[mo_bi][ifname_uplink]["total_in"]
                        row2[28] = ifaces_metrics[mo_bi][ifname_uplink]["total_out"]
                        r += [translate_row(row2, cmap)]
                        ss = False
            if ss:
                r += [translate_row(row2, cmap)]
    filename = "metrics_detail_report_%s" % datetime.datetime.now().strftime("%Y%m%d")
    if o_format == "csv":
        response = HttpResponse(content_type="text/csv")
        response["Content-Disposition"] = 'attachment; filename="%s.csv"' % filename
        writer = csv.writer(response, dialect="excel", delimiter=",",
                            quoting=csv.QUOTE_MINIMAL)
        writer.writerows(r)
        return response
    elif o_format == "csv_zip":
        response = BytesIO()
        f = TextIOWrapper(TemporaryFile(mode="w+b"), encoding="utf-8")
        writer = csv.writer(f, dialect="excel", delimiter=";", quotechar='"')
        writer.writerows(r)
        f.seek(0)
        with ZipFile(response, "w", compression=ZIP_DEFLATED) as zf:
            zf.writestr("%s.csv" % filename, f.read())
            zf.filename = "%s.csv.zip" % filename
        # response = HttpResponse(content_type="text/csv")
        response.seek(0)
        response = HttpResponse(response.getvalue(), content_type="application/zip")
        response["Content-Disposition"] = 'attachment; filename="%s.csv.zip"' % filename
        return response
    elif o_format == "xlsx":
        response = BytesIO()
        wb = xlsxwriter.Workbook(response)
        cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
        ws = wb.add_worksheet("Metrics")
        max_column_data_length = {}
        for rn, x in enumerate(r):
            for cn, c in enumerate(x):
                # Track the widest cell per column (skipping the header row).
                if rn and (r[0][cn] not in max_column_data_length
                           or len(str(c)) > max_column_data_length[r[0][cn]]):
                    max_column_data_length[r[0][cn]] = len(str(c))
                ws.write(rn, cn, c, cf1)
        ws.autofilter(0, 0, rn, cn)
        ws.freeze_panes(1, 0)
        for cn, c in enumerate(r[0]):
            # Set column width
            width = get_column_width(c)
            if enable_autowidth and width < max_column_data_length[c]:
                width = max_column_data_length[c]
            ws.set_column(cn, cn, width=width)
        wb.close()
        response.seek(0)
        response = HttpResponse(response.getvalue(),
                                content_type="application/vnd.ms-excel")
        response["Content-Disposition"] = 'attachment; filename="%s.xlsx"' % filename
        response.close()
        return response
def api_report(
    self,
    request,
    o_format,
    is_managed=None,
    administrative_domain=None,
    selector=None,
    pool=None,
    segment=None,
    avail_status=False,
    columns=None,
    ids=None,
    enable_autowidth=False,
):
    """Build the "links detail" report and stream it as CSV, zipped CSV or XLSX.

    Filters ManagedObjects by the request user's access plus the optional
    pool/selector/segment/administrative-domain arguments, resolves both
    endpoints of every 2-ended link from ReportLinksDetail, and emits one row
    per link with the columns selected via the comma-separated ``columns``
    string.  ``o_format`` selects the output container ("csv", "csv_zip" or
    "xlsx").  NOTE(review): ``avail_status`` is accepted but never used here;
    ``columns.split(",")`` below assumes ``columns`` is a non-None string.
    """

    def row(row):
        # Normalize every cell to text; datetimes get a fixed format.
        def qe(v):
            if v is None:
                return ""
            if isinstance(v, str):
                return smart_text(v)
            elif isinstance(v, datetime.datetime):
                return v.strftime("%Y-%m-%d %H:%M:%S")
            elif not isinstance(v, str):
                return str(v)
            else:
                return v

        return [qe(x) for x in row]

    def translate_row(row, cmap):
        # Project a full row down to the user-selected column order.
        return [row[i] for i in cmap]

    type_columns = ["Up/10G", "Up/1G", "Up/100M", "Down/-", "-"]
    cols = [
        "object1_admin_domain",
        # "id",
        "object1_name",
        "object1_address",
        "object1_platform",
        "object1_segment",
        "object1_tags",
        "object1_iface",
        "object1_descr",
        "object1_speed",
        "object2_admin_domain",
        "object2_name",
        "object2_address",
        "object2_platform",
        "object2_segment",
        "object2_tags",
        "object2_iface",
        "object2_descr",
        "object2_speed",
        "link_proto",
        "last_seen",
    ]
    header_row = [
        "OBJECT1_ADMIN_DOMAIN",
        "OBJECT1_NAME",
        "OBJECT1_ADDRESS",
        "OBJECT1_PLATFORM",
        "OBJECT1_SEGMENT",
        "OBJECT1_TAGS",
        "OBJECT1_IFACE",
        "OBJECT1_DESCR",
        "OBJECT1_SPEED",
        "OBJECT2_ADMIN_DOMAIN",
        "OBJECT2_NAME",
        "OBJECT2_ADDRESS",
        "OBJECT2_PLATFORM",
        "OBJECT2_SEGMENT",
        "OBJECT2_TAGS",
        "OBJECT2_IFACE",
        "OBJECT2_DESCR",
        "OBJECT2_SPEED",
        "LINK_PROTO",
        "LAST_SEEN",
    ]
    # cmap maps requested column names to indexes in cols; unknown names are
    # silently skipped.
    if columns:
        cmap = []
        for c in columns.split(","):
            try:
                cmap += [cols.index(c)]
            except ValueError:
                continue
    else:
        cmap = list(range(len(cols)))
    r = [translate_row(header_row, cmap)]
    if "interface_type_count" in columns.split(","):
        r[-1].extend(type_columns)
    # self.logger.info(r)
    # self.logger.info("---------------------------------")
    # print("-----------%s------------%s" % (administrative_domain, columns))
    p = Pool.get_by_name(pool or "default")
    mos = ManagedObject.objects.filter()
    # NOTE(review): each of the next three branches REPLACES the queryset
    # rather than narrowing it, so e.g. ``ids`` overrides the pool filter.
    if request.user.is_superuser and not administrative_domain and not selector and not segment:
        mos = ManagedObject.objects.filter(pool=p)
    if ids:
        mos = ManagedObject.objects.filter(id__in=[ids])
    if is_managed is not None:
        mos = ManagedObject.objects.filter(is_managed=is_managed)
    if pool:
        mos = mos.filter(pool=p)
    if not request.user.is_superuser:
        mos = mos.filter(
            administrative_domain__in=UserAccess.get_domains(request.user))
    if administrative_domain:
        ads = AdministrativeDomain.get_nested_ids(
            int(administrative_domain))
        mos = mos.filter(administrative_domain__in=ads)
    if selector:
        selector = ManagedObjectSelector.get_by_id(int(selector))
        mos = mos.filter(selector.Q)
    if segment:
        segment = NetworkSegment.objects.filter(id=segment).first()
        if segment:
            mos = mos.filter(segment__in=segment.get_nested_ids())
    mos_id = list(mos.values_list("id", flat=True))
    rld = ReportLinksDetail(mos_id)
    # Lookup table: managed-object id -> (adm domain, name, address, segment,
    # platform, labels).  Built over ALL objects, not just the filtered set.
    mo_resolv = {
        mo[0]: mo[1:]
        for mo in ManagedObject.objects.filter().values_list(
            "id",
            "administrative_domain__name",
            "name",
            "address",
            "segment",
            "platform",
            "labels",
        )
    }
    for link in rld.out:
        if len(rld.out[link]) != 2:
            # Multilink or bad link
            continue
        s1, s2 = rld.out[link]
        # Segment/platform ids are only resolved when the matching columns
        # were requested.
        seg1, seg2 = None, None
        if "object1_segment" in columns.split(
                ",") or "object2_segment" in columns.split(","):
            seg1, seg2 = mo_resolv[s1["mo"][0]][3], mo_resolv[s2["mo"][0]][3]
        plat1, plat2 = None, None
        if "object1_platform" in columns.split(
                ",") or "object2_platform" in columns.split(","):
            plat1, plat2 = mo_resolv[s1["mo"][0]][4], mo_resolv[s2["mo"][0]][4]
        r += [
            translate_row(
                row([
                    mo_resolv[s1["mo"][0]][0],
                    mo_resolv[s1["mo"][0]][1],
                    mo_resolv[s1["mo"][0]][2],
                    "" if not plat1 else Platform.get_by_id(plat1),
                    "" if not seg1 else NetworkSegment.get_by_id(seg1),
                    ";".join(mo_resolv[s1["mo"][0]][5] or []),
                    s1["iface_n"][0],
                    s1.get("iface_descr")[0] if s1.get("iface_descr") else "",
                    s1.get("iface_speed")[0] if s1.get("iface_speed") else 0,
                    mo_resolv[s2["mo"][0]][0],
                    mo_resolv[s2["mo"][0]][1],
                    mo_resolv[s2["mo"][0]][2],
                    "" if not plat2 else Platform.get_by_id(plat2),
                    "" if not seg2 else NetworkSegment.get_by_id(seg2),
                    ";".join(mo_resolv[s2["mo"][0]][5] or []),
                    s2["iface_n"][0],
                    s2.get("iface_descr")[0] if s2.get("iface_descr") else "",
                    s2.get("iface_speed")[0] if s2.get("iface_speed") else 0,
                    s2.get("dis_method", ""),
                    s2.get("last_seen", ""),
                ]),
                cmap,
            )
        ]
    filename = "links_detail_report_%s" % datetime.datetime.now().strftime(
        "%Y%m%d")
    if o_format == "csv":
        # Plain CSV written straight into the HttpResponse.
        response = HttpResponse(content_type="text/csv")
        response[
            "Content-Disposition"] = 'attachment; filename="%s.csv"' % filename
        writer = csv.writer(response,
                            dialect="excel",
                            delimiter=",",
                            quoting=csv.QUOTE_MINIMAL)
        writer.writerows(r)
        return response
    elif o_format == "csv_zip":
        # CSV rendered to a temp file, then zipped into an in-memory buffer.
        response = BytesIO()
        f = TextIOWrapper(TemporaryFile(mode="w+b"), encoding="utf-8")
        writer = csv.writer(f, dialect="excel", delimiter=";", quotechar='"')
        writer.writerows(r)
        f.seek(0)
        with ZipFile(response, "w", compression=ZIP_DEFLATED) as zf:
            zf.writestr("%s.csv" % filename, f.read())
            zf.filename = "%s.csv.zip" % filename
        # response = HttpResponse(content_type="text/csv")
        response.seek(0)
        response = HttpResponse(response.getvalue(),
                                content_type="application/zip")
        response[
            "Content-Disposition"] = 'attachment; filename="%s.csv.zip"' % filename
        return response
    elif o_format == "xlsx":
        # XLSX workbook built in memory via xlsxwriter.
        response = BytesIO()
        wb = xlsxwriter.Workbook(response)
        cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
        ws = wb.add_worksheet("Objects")
        # Track the widest cell per header column for optional autowidth.
        max_column_data_length = {}
        for rn, x in enumerate(r):
            for cn, c in enumerate(x):
                if rn and (r[0][cn] not in max_column_data_length or
                           len(str(c)) > max_column_data_length[r[0][cn]]):
                    max_column_data_length[r[0][cn]] = len(str(c))
                ws.write(rn, cn, c, cf1)
        ws.autofilter(0, 0, rn, cn)
        ws.freeze_panes(1, 0)
        for cn, c in enumerate(r[0]):
            # Set column width
            width = get_column_width(c)
            if enable_autowidth and width < max_column_data_length[c]:
                width = max_column_data_length[c]
            ws.set_column(cn, cn, width=width)
        wb.close()
        response.seek(0)
        response = HttpResponse(response.getvalue(),
                                content_type="application/vnd.ms-excel")
        # response = HttpResponse(
        #     content_type="application/x-ms-excel")
        response[
            "Content-Disposition"] = 'attachment; filename="%s.xlsx"' % filename
        response.close()
        return response
def api_report(
    self,
    request,
    o_format,
    administrative_domain=None,
    selector=None,
    interface_profile=None,
    zero=None,
    def_profile=None,
    columns=None,
    enable_autowidth=False,
):
    """Build the "interface status" report and return it as CSV or XLSX.

    One row per interface from ReportInterfaceStatus, enriched with owner
    managed-object data.  ``columns`` is a comma-separated selection of the
    ``cols`` names below; ``o_format`` is "csv" or "xlsx".
    NOTE(review): this block uses ``unicode`` and (below) ``StringIO`` with
    xlsxwriter, i.e. it appears to target Python 2 — confirm before porting.
    """

    def humanize_speed(speed):
        # Render a kbit/s figure as "1G"/"100M"/"10k" style text; falsy -> "-".
        if not speed:
            return "-"
        for t, n in [(1000000, "G"), (1000, "M"), (1, "k")]:
            if speed >= t:
                if speed // t * t == speed:
                    return "%d%s" % (speed // t, n)
                else:
                    return "%.2f%s" % (float(speed) / t, n)
        return str(speed)

    def row(row):
        # Normalize every cell to a byte/str value (Python 2 idiom).
        def qe(v):
            if v is None:
                return ""
            if isinstance(v, unicode):
                return v.encode("utf-8")
            elif isinstance(v, datetime.datetime):
                return v.strftime("%Y-%m-%d %H:%M:%S")
            elif not isinstance(v, str):
                return str(v)
            else:
                return v

        return [qe(x) for x in row]

    def translate_row(row, cmap):
        # Project a full row down to the user-selected column order.
        return [row[i] for i in cmap]

    cols = [
        "object_name",
        "object_address",
        "object_model",
        "object_software",
        "object_port_name",
        "object_port_profile_name",
        "object_port_status",
        "object_link_status",
        "object_port_speed",
        "object_port_duplex",
        "object_port_untagged_vlan",
        "object_port_tagged_vlans",
    ]
    header_row = [
        "MANAGED_OBJECT",
        "OBJECT_ADDRESS",
        "OBJECT_MODEL",
        "OBJECT_SOFTWARE",
        "PORT_NAME",
        "PORT_PROFILE_NAME",
        "PORT_STATUS",
        "LINK_STATUS",
        "PORT_SPEED",
        "PORT_DUPLEX",
        "PORT_UNTAGGED_VLAN",
        "PORT_TAGGED_VLANS",
    ]
    # Unknown column names are silently dropped.
    if columns:
        cmap = []
        for c in columns.split(","):
            try:
                cmap += [cols.index(c)]
            except ValueError:
                continue
    else:
        cmap = list(range(len(cols)))
    r = [translate_row(header_row, cmap)]
    mo = {}
    if_p = {}
    DUPLEX = {True: "Full", False: "Half"}
    # Cache interface-profile names by id for row rendering.
    for ifp in InterfaceProfile.objects.filter():
        if_p[ifp.id] = {"name": ifp.name}
    mos = ManagedObject.objects.filter(is_managed=True)
    # NOTE(review): this superuser branch re-assigns the identical queryset —
    # it has no filtering effect as written.
    if (request.user.is_superuser and not administrative_domain
            and not selector and not interface_profile):
        mos = ManagedObject.objects.filter(is_managed=True)
    if not request.user.is_superuser:
        mos = mos.filter(
            administrative_domain__in=UserAccess.get_domains(request.user))
    if administrative_domain:
        ads = AdministrativeDomain.get_nested_ids(
            int(administrative_domain))
        mos = mos.filter(administrative_domain__in=ads)
    if selector:
        selector = ManagedObjectSelector.get_by_id(int(selector))
        mos = mos.filter(selector.Q)
    # Snapshot the per-object attributes needed for each interface row.
    for o in mos:
        mo[o.id] = {
            "type": "managedobject",
            "id": str(o.id),
            "name": o.name,
            "status": o.is_managed,
            "address": o.address,
            "vendor": o.vendor,
            "version": o.version,
            "platform": o.platform,
        }
    mos_id = list(mos.values_list("id", flat=True))
    rld = ReportInterfaceStatus(mos_id, zero, def_profile, interface_profile)
    for i in rld.out:
        # VLAN data comes from the first subinterface, when present.
        untag, tagged = "", ""
        if i["subs"]:
            untag = i["subs"][0].get("untagged_vlan", "")
            tagged = list_to_ranges(i["subs"][0].get("tagged_vlans", []))
        r += [
            translate_row(
                row([
                    mo[i["managed_object"]]["name"],
                    mo[i["managed_object"]]["address"],
                    "%s %s" % (
                        str(mo[i["managed_object"]]["vendor"]),
                        str(mo[i["managed_object"]]["platform"]),
                    ),
                    str(mo[i["managed_object"]]["version"]),
                    i["name"],
                    if_p[i["profile"]]["name"],
                    "UP" if i["admin_status"] is True else "Down",
                    "UP" if "oper_status" in i and i["oper_status"] is True
                    else "Down",
                    humanize_speed(i["in_speed"]) if "in_speed" in i else "-",
                    DUPLEX.get(i["full_duplex"])
                    if "full_duplex" in i and "in_speed" in i else "-",
                    untag,
                    tagged,
                ]),
                cmap,
            )
        ]
    filename = "interface_status_report_%s" % datetime.datetime.now(
    ).strftime("%Y%m%d")
    if o_format == "csv":
        response = HttpResponse(content_type="text/csv")
        response[
            "Content-Disposition"] = 'attachment; filename="%s.csv"' % filename
        writer = csv.writer(response, dialect="excel", delimiter=";")
        writer.writerows(r)
        return response
    elif o_format == "xlsx":
        # NOTE(review): StringIO with xlsxwriter only works on Python 2.
        response = StringIO()
        wb = xlsxwriter.Workbook(response)
        cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
        ws = wb.add_worksheet("Objects")
        # Track the widest cell per header column for optional autowidth.
        max_column_data_length = {}
        for rn, x in enumerate(r):
            for cn, c in enumerate(x):
                if rn and (r[0][cn] not in max_column_data_length or
                           len(str(c)) > max_column_data_length[r[0][cn]]):
                    max_column_data_length[r[0][cn]] = len(str(c))
                ws.write(rn, cn, c, cf1)
        ws.autofilter(0, 0, rn, cn)
        ws.freeze_panes(1, 0)
        for cn, c in enumerate(r[0]):
            # Set column width
            width = get_column_width(c)
            if enable_autowidth and width < max_column_data_length[c]:
                width = max_column_data_length[c]
            ws.set_column(cn, cn, width=width)
        wb.close()
        response.seek(0)
        response = HttpResponse(response.getvalue(),
                                content_type="application/vnd.ms-excel")
        # response = HttpResponse(
        #     content_type="application/x-ms-excel")
        response[
            "Content-Disposition"] = 'attachment; filename="%s.xlsx"' % filename
        response.close()
        return response
def email_contract(self):
    """Render this TA's contract form to PDF and email it to the applicant.

    Uses the posting unit's configured contract email text/subject, falling
    back to the module defaults when none is configured.  The PDF is rendered
    by ``ta_form`` into an in-memory HttpResponse and attached to the message.
    Sent from the posting contact when one exists, otherwise from
    ``settings.DEFAULT_FROM_EMAIL``.
    """
    unit = self.posting.unit
    try:
        contract_email = unit.contract_email_text
        content = contract_email.content
        subject = contract_email.subject
    except TAContractEmailText.DoesNotExist:
        # No unit-specific text configured: use the defaults.
        content = DEFAULT_EMAIL_TEXT
        subject = DEFAULT_EMAIL_SUBJECT
    # The response is only used as an in-memory buffer for the rendered PDF.
    filename = '%s-%s.pdf' % (self.posting.slug, self.application.person.userid)
    response = HttpResponse(content_type="application/pdf")
    response['Content-Disposition'] = 'inline; filename="%s"' % filename
    ta_form(self, response)
    to_email = self.application.person.email()
    contact = self.posting.contact()
    if contact:
        from_email = contact.email()
    else:
        from_email = settings.DEFAULT_FROM_EMAIL
    msg = EmailMultiAlternatives(subject, content, from_email, [to_email],
                                 headers={'X-coursys-topic': 'ta'})
    # BUGFIX: the attachment filename previously had a stray leading '"'
    # ('"slug-userid.pdf'), producing a malformed attachment name.
    msg.attach(filename, response.getvalue(), 'application/pdf')
    msg.send()
def index(request):
    """Return an (empty) HTTP response for the index view.

    BUGFIX: the original body was ``return HttpResponse.getvalue()`` —
    calling ``getvalue`` on the HttpResponse *class* raises a TypeError on
    every request (no instance, no ``self``).  The minimal working behavior
    is to return an HttpResponse instance; fill in real content as needed.
    """
    return HttpResponse()
def api_report(
    self,
    request,
    o_format,
    is_managed=None,
    administrative_domain=None,
    selector=None,
    pool=None,
    segment=None,
    avail_status=False,
    columns=None,
    ids=None,
    detail_stat=None,
    enable_autowidth=False,
):
    """Build the "managed object detail" report and return it as CSV or XLSX.

    One row per managed object selected by ``get_report_object``; optional
    column groups (availability, container, serials, caps, tags, discovery
    problems, ...) are resolved through per-feature report iterators that are
    consumed in lock-step with the main queryset — the ``.order_by("id")``
    on both sides is what keeps them aligned.  ``columns`` is a comma-
    separated column selection; ``o_format`` is "csv" or "xlsx".
    NOTE(review): uses ``unicode``/``StringIO``/``six`` — Python 2 era code.
    """

    def row(row):
        # Normalize every cell to a byte/str value (Python 2 idiom).
        def qe(v):
            if v is None:
                return ""
            if isinstance(v, unicode):
                return v.encode("utf-8")
            elif isinstance(v, datetime.datetime):
                return v.strftime("%Y-%m-%d %H:%M:%S")
            elif not isinstance(v, str):
                return str(v)
            else:
                return v

        return [qe(x) for x in row]

    def translate_row(row, cmap):
        # Project a full row down to the user-selected column order.
        return [row[i] for i in cmap]

    type_columns = ["Up/10G", "Up/1G", "Up/100M", "Up/10M", "Down/-", "-"]
    cols = [
        "id",
        "object_name",
        "object_address",
        "object_hostname",
        "object_status",
        "profile_name",
        "object_profile",
        "object_vendor",
        "object_platform",
        "object_attr_hwversion",
        "object_version",
        "object_attr_bootprom",
        "object_serial",
        "object_attr_patch",
        "auth_profile",
        "avail",
        "admin_domain",
        "container",
        "segment",
        "phys_interface_count",
        "link_count",
        "last_config_ts"
        # "discovery_problem"
        # "object_tags"
        # "sorted_tags"
        # "object_caps"
        # "interface_type_count"
    ]
    header_row = [
        "ID",
        "OBJECT_NAME",
        "OBJECT_ADDRESS",
        "OBJECT_HOSTNAME",
        "OBJECT_STATUS",
        "PROFILE_NAME",
        "OBJECT_PROFILE",
        "OBJECT_VENDOR",
        "OBJECT_PLATFORM",
        "OBJECT_HWVERSION",
        "OBJECT_VERSION",
        "OBJECT_BOOTPROM",
        "OBJECT_SERIAL",
        "OBJECT_ATTR_PATCH",
        "AUTH_PROFILE",
        "AVAIL",
        "ADMIN_DOMAIN",
        "CONTAINER",
        "SEGMENT",
        "PHYS_INTERFACE_COUNT",
        "LINK_COUNT",
        "LAST_CONFIG_TS",
    ]
    # "DISCOVERY_PROBLEM"
    # "ADM_PATH
    # "DISCOVERY_PROBLEM"
    # "OBJECT_TAGS"
    # "SORTED_TAGS"
    # "OBJECT_CAPS"
    # "INTERFACE_TYPE_COUNT"
    # Unknown column names are silently dropped.
    if columns:
        cmap = []
        for c in columns.split(","):
            try:
                cmap += [cols.index(c)]
            except ValueError:
                continue
    else:
        cmap = list(range(len(cols)))
    r = [translate_row(header_row, cmap)]
    mos = self.get_report_object(request.user, is_managed,
                                 administrative_domain, selector, pool,
                                 segment, ids)
    columns_filter = set(columns.split(","))
    mos_id = tuple(mos.order_by("id").values_list("id", flat=True))
    mos_filter = None
    if detail_stat:
        # Restrict to the ids that match the detail-stat model filter.
        ref = ReportModelFilter()
        ids = list(six.itervalues(ref.proccessed(detail_stat)))
        mos_filter = set(mos_id).intersection(ids[0])
        mos_id = sorted(mos_filter)
    avail = {}
    if "avail" in columns_filter:
        avail = ObjectStatus.get_statuses(mos_id)
    # These iterators yield one entry per object id, in id order, and are
    # advanced with next() inside the main loop — keep them in sync.
    link_count = iter(ReportObjectLinkCount(mos_id))
    iface_count = iter(ReportObjectIfacesTypeStat(mos_id))
    if "container" in columns_filter:
        container_lookup = iter(ReportContainerData(mos_id))
    else:
        container_lookup = None
    if "object_serial" in columns_filter:
        container_serials = iter(ReportContainer(mos_id))
    else:
        container_serials = None
    if "interface_type_count" in columns_filter:
        iss = iter(ReportObjectIfacesStatusStat(mos_id))
    else:
        iss = None
    if "object_attr_patch" in columns_filter or "object_serial" in columns_filter:
        roa = iter(ReportObjectAttributes(mos_id))
    else:
        roa = None
    hn = iter(ReportObjectsHostname1(mos_id))
    rc = iter(ReportObjectConfig(mos_id))
    # ccc = iter(ReportObjectCaps(mos_id))
    # Optional column groups extend the header row built above.
    if "adm_path" in columns_filter:
        ad_path = ReportAdPath()
        r[-1].extend([_("ADM_PATH1"), _("ADM_PATH1"), _("ADM_PATH1")])
    if "interface_type_count" in columns_filter:
        r[-1].extend(type_columns)
    if "object_caps" in columns_filter:
        object_caps = ReportObjectCaps(mos_id)
        caps_columns = list(six.itervalues(object_caps.ATTRS))
        ccc = iter(object_caps)
        r[-1].extend(caps_columns)
    if "object_tags" in columns_filter:
        r[-1].extend([_("OBJECT_TAGS")])
    if "sorted_tags" in columns_filter:
        # Collect the distinct tag universe to get one column per tag.
        tags = set()
        for s in (ManagedObject.objects.filter().exclude(
                tags=None).values_list("tags", flat=True).distinct()):
            tags.update(set(s))
        tags_o = sorted([t for t in tags if "{" not in t])
        r[-1].extend(tags_o)
    if "discovery_problem" in columns.split(","):
        discovery_result = ReportDiscoveryResult(mos_id)
        discovery_result.safe_output = True
        discovery_result.unknown_value = ([""] *
                                          len(discovery_result.ATTRS), )
        dp_columns = discovery_result.ATTRS
        dp = iter(discovery_result)
        r[-1].extend(dp_columns)
    for (
            mo_id,
            name,
            address,
            is_managed,
            sa_profile,
            o_profile,
            auth_profile,
            ad,
            m_segment,
            vendor,
            platform,
            version,
            tags,
    ) in (mos.values_list(
            "id",
            "name",
            "address",
            "is_managed",
            "profile",
            "object_profile__name",
            "auth_profile__name",
            "administrative_domain__name",
            "segment",
            "vendor",
            "platform",
            "version",
            "tags",
    ).order_by("id").iterator()):
        if (mos_filter and mo_id not in mos_filter) or not mos_id:
            continue
        if container_serials:
            mo_serials = next(container_serials)
        else:
            mo_serials = [{}]
        if container_lookup:
            mo_continer = next(container_lookup)
        else:
            mo_continer = ("", )
        if roa:
            serial, hw_ver, boot_prom, patch = next(roa)[0]  # noqa
        else:
            serial, hw_ver, boot_prom, patch = "", "", "", ""  # noqa
        r.append(
            translate_row(
                row([
                    mo_id,
                    name,
                    address,
                    next(hn)[0],
                    "managed" if is_managed else "unmanaged",
                    Profile.get_by_id(sa_profile),
                    o_profile,
                    Vendor.get_by_id(vendor) if vendor else "",
                    Platform.get_by_id(platform) if platform else "",
                    hw_ver,
                    Firmware.get_by_id(version) if version else "",
                    boot_prom,
                    # Serial
                    mo_serials[0].get("serial", "") or serial,
                    patch or "",
                    auth_profile,
                    _("Yes") if avail.get(mo_id, None) else _("No"),
                    ad,
                    mo_continer[0],
                    NetworkSegment.get_by_id(m_segment) if m_segment else "",
                    next(iface_count)[0],
                    next(link_count)[0],
                    next(rc)[0],
                ]),
                cmap,
            ))
        # Optional column groups are appended after the translated row.
        if "adm_path" in columns_filter:
            r[-1].extend([ad] + list(ad_path[ad]))
        if "interface_type_count" in columns_filter:
            r[-1].extend(next(iss)[0])
        if "object_caps" in columns_filter:
            r[-1].extend(next(ccc)[0])
        if "object_tags" in columns_filter:
            r[-1].append(",".join(tags if tags else []))
        if "sorted_tags" in columns_filter:
            # One column per known tag; a ValueError aborts the remaining
            # tags for this row (logged, not raised).
            out_tags = [""] * len(tags_o)
            try:
                if tags:
                    for m in tags:
                        out_tags[tags_o.index(m)] = m
            except ValueError:
                logger.warning("Bad value for tag: %s", m)
            r[-1].extend(out_tags)
        if "discovery_problem" in columns_filter:
            r[-1].extend(next(dp)[0])
    filename = "mo_detail_report_%s" % datetime.datetime.now().strftime(
        "%Y%m%d")
    if o_format == "csv":
        response = HttpResponse(content_type="text/csv")
        response[
            "Content-Disposition"] = 'attachment; filename="%s.csv"' % filename
        writer = csv.writer(response,
                            dialect="excel",
                            delimiter=";",
                            quotechar='"')
        writer.writerows(r)
        return response
    elif o_format == "xlsx":
        # NOTE(review): StringIO with xlsxwriter only works on Python 2.
        response = StringIO()
        wb = xlsxwriter.Workbook(response)
        cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
        ws = wb.add_worksheet("Objects")
        # Track the widest cell per header column for optional autowidth.
        max_column_data_length = {}
        for rn, x in enumerate(r):
            for cn, c in enumerate(x):
                if rn and (r[0][cn] not in max_column_data_length or
                           len(str(c)) > max_column_data_length[r[0][cn]]):
                    max_column_data_length[r[0][cn]] = len(str(c))
                ws.write(rn, cn, c, cf1)
        # for
        ws.autofilter(0, 0, rn, cn)
        ws.freeze_panes(1, 0)
        for cn, c in enumerate(r[0]):
            # Set column width
            width = get_column_width(c)
            if enable_autowidth and width < max_column_data_length[c]:
                width = max_column_data_length[c]
            ws.set_column(cn, cn, width=width)
        wb.close()
        response.seek(0)
        response = HttpResponse(response.getvalue(),
                                content_type="application/vnd.ms-excel")
        # response = HttpResponse(
        #     content_type="application/x-ms-excel")
        response[
            "Content-Disposition"] = 'attachment; filename="%s.xlsx"' % filename
        response.close()
        return response
def pdf_preview(pdf):
    """Wrap already-rendered PDF bytes in an ``application/pdf`` response.

    The original implementation built an intermediate HttpResponse (complete
    with an attachment Content-Disposition header), wrote ``pdf`` into it,
    then copied the payload into a SECOND response — silently dropping the
    disposition header it had just set.  The returned object is therefore
    equivalent to wrapping ``pdf`` directly, which is what we do here: same
    body, same headers on the returned response, no redundant copy.
    """
    return HttpResponse(pdf, content_type='application/pdf')
def api_report(
    self,
    request,
    reporttype=None,
    from_date=None,
    to_date=None,
    object_profile=None,
    filter_default=None,
    exclude_zero=None,
    interface_profile=None,
    selector=None,
    administrative_domain=None,
    columns=None,
    o_format=None,
    enable_autowidth=False,
    **kwargs
):
    """Build the "metrics detail" report (load/errors/cpu/ping) as CSV or XLSX.

    Assembles a ClickHouse-style select map for the chosen ``reporttype``,
    queries it through ``self.metric_source``, joins the metric rows with
    managed-object attributes keyed by BI id, and renders the result.
    Dates are "dd.mm.YYYY" strings; ``columns`` is a comma-separated column
    selection.  NOTE(review): ``filter_default`` and ``**kwargs`` are
    accepted but unused here; ``columns`` is assumed non-None.
    """

    def translate_row(row, cmap):
        # Project a full row down to the user-selected column order.
        return [row[i] for i in cmap]

    # Dashboard regexp per report type (used by the grafana report script).
    map_table = {
        "load_interfaces": "/Interface\s\|\sLoad\s\|\s[In|Out]/",
        "load_cpu": "/[CPU|Memory]\s\|\sUsage/",
        "errors": "/Interface\s\|\s[Errors|Discards]\s\|\s[In|Out]/",
        "ping": "/Ping\s\|\sRTT/",
    }
    cols = [
        "id",
        "object_name",
        "object_address",
        "object_platform",
        "object_adm_domain",
        "object_segment",
        # "object_hostname",
        # "object_status",
        # "profile_name",
        # "object_profile",
        # "object_vendor",
        "iface_name",
        "iface_description",
        "iface_speed",
        "load_in",
        "load_in_p",
        "load_out",
        "load_out_p",
        "errors_in",
        "errors_out",
        "slot",
        "cpu_usage",
        "memory_usage",
        "ping_rtt",
        "ping_attempts",
        "interface_flap",
        "interface_load_url",
    ]
    header_row = [
        "ID",
        "OBJECT_NAME",
        "OBJECT_ADDRESS",
        "OBJECT_PLATFORM",
        "OBJECT_ADM_DOMAIN",
        "OBJECT_SEGMENT",
        "IFACE_NAME",
        "IFACE_DESCRIPTION",
        "IFACE_SPEED",
        "LOAD_IN",
        "LOAD_IN_P",
        "LOAD_OUT",
        "LOAD_OUT_P",
        "ERRORS_IN",
        "ERRORS_OUT",
        "CPU_USAGE",
        "MEMORY_USAGE",
        "PING_RTT",
        "PING_ATTEMPTS",
        "INTERFACE_FLAP",
        "INTERFACE_LOAD_URL",
    ]
    # Unknown column names are silently dropped.
    if columns:
        cmap = []
        for c in columns.split(","):
            try:
                cmap += [cols.index(c)]
            except ValueError:
                continue
    else:
        cmap = list(range(len(cols)))
    columns_order = columns.split(",")
    columns_filter = set(columns_order)
    r = [translate_row(header_row, cmap)]
    object_columns = [c for c in columns_order if c.startswith("object")]
    # Date Time Block
    # Defaults: last 24h; equal from/to means a one-day window.
    if not from_date:
        from_date = datetime.datetime.now() - datetime.timedelta(days=1)
    else:
        from_date = datetime.datetime.strptime(from_date, "%d.%m.%Y")
    if not to_date or from_date == to_date:
        to_date = from_date + datetime.timedelta(days=1)
    else:
        to_date = datetime.datetime.strptime(to_date, "%d.%m.%Y") + datetime.timedelta(days=1)
    # interval = (to_date - from_date).days
    ts_from_date = time.mktime(from_date.timetuple())
    ts_to_date = time.mktime(to_date.timetuple())
    # Load managed objects
    mos = ManagedObject.objects.filter(is_managed=True)
    if not request.user.is_superuser:
        mos = mos.filter(administrative_domain__in=UserAccess.get_domains(request.user))
    if selector:
        mos = mos.filter(ManagedObjectSelector.objects.get(id=int(selector)).Q)
    if administrative_domain:
        mos = mos.filter(
            administrative_domain__in=AdministrativeDomain.get_nested_ids(
                int(administrative_domain)
            )
        )
    if object_profile:
        mos = mos.filter(object_profile=object_profile)
    # iface_dict = {}
    # Template parameters for the per-row grafana dashboard link.
    d_url = {
        "path": "/ui/grafana/dashboard/script/report.js",
        "rname": map_table[reporttype],
        "from": str(int(ts_from_date * 1000)),
        "to": str(int(ts_to_date * 1000)),
        # o.name.replace("#", "%23")
        "biid": "",
        "oname": "",
        "iname": "",
    }
    # Per-report-type URL template and base select map.  q_select keys are
    # (position, field, alias) tuples; values are ClickHouse expressions.
    report_map = {
        "load_interfaces": {
            "url": "%(path)s?title=interface&biid=%(biid)s"
            "&obj=%(oname)s&iface=%(iname)s&from=%(from)s&to=%(to)s",
            "q_group": ["interface"],
            "q_select": {
                (0, "managed_object", "id"): "managed_object",
                (1, "path", "iface_name"): "arrayStringConcat(path)",
            },
        },
        "errors": {
            "url": """%(path)s?title=errors&biid=%(biid)s&obj=%(oname)s&iface=%(iname)s&from=%(from)s&to=%(to)s""",
            "q_group": ["interface"],
        },
        "load_cpu": {
            "url": """%(path)s?title=cpu&biid=%(biid)s&obj=%(oname)s&from=%(from)s&to=%(to)s""",
            "q_select": {
                (0, "managed_object", "id"): "managed_object",
                (1, "path", "slot"): "arrayStringConcat(path)",
            },
        },
        "ping": {
            "url": """%(path)s?title=ping&biid=%(biid)s&obj=%(oname)s&from=%(from)s&to=%(to)s""",
            "q_select": {(0, "managed_object", "id"): "managed_object"},
        },
    }
    # Column name -> (source field, alias, ClickHouse aggregate expression).
    query_map = {
        # "iface_description": ('', 'iface_description', "''"),
        "iface_description": (
            "",
            "iface_description",
            "dictGetString('interfaceattributes','description' , (managed_object, arrayStringConcat(path)))",
        ),
        "iface_speed": (
            "speed",
            "iface_speed",
            "if(max(speed) = 0, dictGetUInt64('interfaceattributes', 'in_speed', "
            "(managed_object, arrayStringConcat(path))), max(speed))",
        ),
        "load_in": ("load_in", "l_in", "round(quantile(0.90)(load_in), 0)"),
        "load_in_p": (
            "load_in",
            "l_in_p",
            "replaceOne(toString(round(quantile(0.90)(load_in) / "
            "if(max(speed) = 0, dictGetUInt64('interfaceattributes', 'in_speed', "
            "(managed_object, arrayStringConcat(path))), max(speed)), 4) * 100), '.', ',')",
        ),
        "load_out": ("load_out", "l_out", "round(quantile(0.90)(load_out), 0)"),
        "load_out_p": (
            "load_out",
            "l_out_p",
            "replaceOne(toString(round(quantile(0.90)(load_out) / "
            "if(max(speed) = 0, dictGetUInt64('interfaceattributes', 'in_speed', "
            "(managed_object, arrayStringConcat(path))), max(speed)), 4) * 100), '.', ',')",
        ),
        "errors_in": ("errors_in", "err_in", "quantile(0.90)(errors_in)"),
        "errors_out": ("errors_out", "err_out", "quantile(0.90)(errors_out)"),
        "cpu_usage": ("usage", "cpu_usage", "quantile(0.90)(usage)"),
        "ping_rtt": ("rtt", "ping_rtt", "round(quantile(0.90)(rtt) / 1000, 2)"),
        "ping_attempts": ("attempts", "ping_attempts", "avg(attempts)"),
    }
    query_fields = []
    for c in report_map[reporttype]["q_select"]:
        query_fields += [c[2]]
    field_shift = len(query_fields)  # deny replacing field
    # Extend the base select map with each requested metric column.
    for c in columns.split(","):
        if c not in query_map:
            continue
        field, alias, func = query_map[c]
        report_map[reporttype]["q_select"][
            (columns_order.index(c) + field_shift, field, alias)
        ] = func
        query_fields += [c]
    metrics_attrs = namedtuple("METRICSATTRs", query_fields)
    mo_attrs = namedtuple("MOATTRs", [c for c in cols if c.startswith("object")])
    # BI id -> object attribute tuple, for joining metric rows to objects.
    moss = {}
    for row in mos.values_list(
        "bi_id", "name", "address", "platform", "administrative_domain__name", "segment"
    ):
        moss[row[0]] = mo_attrs(
            *[
                row[1],
                row[2],
                str(Platform.get_by_id(row[3]) if row[3] else ""),
                row[4],
                str(NetworkSegment.get_by_id(row[5])) if row[5] else "",
            ]
        )
    url = report_map[reporttype].get("url", "")
    report_metric = self.metric_source[reporttype](
        tuple(sorted(moss)), from_date, to_date, columns=None
    )
    report_metric.SELECT_QUERY_MAP = report_map[reporttype]["q_select"]
    if exclude_zero and reporttype == "load_interfaces":
        report_metric.CUSTOM_FILTER["having"] += ["max(load_in) != 0 AND max(load_out) != 0"]
    if interface_profile:
        interface_profile = InterfaceProfile.objects.filter(id=interface_profile).first()
        report_metric.CUSTOM_FILTER["having"] += [
            "dictGetString('interfaceattributes', 'profile', "
            "(managed_object, arrayStringConcat(path))) = '%s'" % interface_profile.name
        ]
    # OBJECT_PLATFORM, ADMIN_DOMAIN, SEGMENT, OBJECT_HOSTNAME
    for row in report_metric.do_query():
        mm = metrics_attrs(*row)
        mo = moss[int(mm.id)]
        # Interleave object attributes and metric values in requested order.
        res = []
        for y in columns_order:
            if y in object_columns:
                res += [getattr(mo, y)]
            else:
                res += [getattr(mm, y)]
        if "interface_load_url" in columns_filter:
            d_url["biid"] = mm.id
            d_url["oname"] = mo[2].replace("#", "%23")
            # res += [url % d_url, interval]
            res.insert(columns_order.index("interface_load_url"), url % d_url)
        r += [res]
    filename = "metrics_detail_report_%s" % datetime.datetime.now().strftime("%Y%m%d")
    if o_format == "csv":
        response = HttpResponse(content_type="text/csv")
        response["Content-Disposition"] = 'attachment; filename="%s.csv"' % filename
        writer = csv.writer(response, dialect="excel", delimiter=",", quoting=csv.QUOTE_MINIMAL)
        writer.writerows(r)
        return response
    elif o_format == "xlsx":
        # NOTE(review): StringIO with xlsxwriter only works on Python 2.
        response = StringIO()
        wb = xlsxwriter.Workbook(response)
        cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
        ws = wb.add_worksheet("Alarms")
        # Track the widest cell per header column for optional autowidth.
        max_column_data_length = {}
        for rn, x in enumerate(r):
            for cn, c in enumerate(x):
                if rn and (
                    r[0][cn] not in max_column_data_length
                    or len(str(c)) > max_column_data_length[r[0][cn]]
                ):
                    max_column_data_length[r[0][cn]] = len(str(c))
                ws.write(rn, cn, c, cf1)
        ws.autofilter(0, 0, rn, cn)
        ws.freeze_panes(1, 0)
        for cn, c in enumerate(r[0]):
            # Set column width
            width = get_column_width(c)
            if enable_autowidth and width < max_column_data_length[c]:
                width = max_column_data_length[c]
            ws.set_column(cn, cn, width=width)
        wb.close()
        response.seek(0)
        response = HttpResponse(response.getvalue(), content_type="application/vnd.ms-excel")
        response["Content-Disposition"] = 'attachment; filename="%s.xlsx"' % filename
        response.close()
        return response
def orderitems_csv(request):
    """Return all processible orderitems as csv.

    Streams one CSV row per PROCESSABLE OrderItem (ordered by marketplace
    order number) and, after each order's items, one synthetic shipping-fee
    row.  If ``?email=...`` is present, the CSV is also mailed to the
    configured receiver list.  The leading BOM keeps Excel happy with UTF-8.
    """
    now = datetime.now()
    now_as_str = now.strftime('%Y-%m-%dT%H_%M_%S')
    # Create the HttpResponse object with the appropriate CSV header.
    response = HttpResponse(
        content_type='text/csv',
        headers={
            'Content-Disposition': f'attachment; filename="{now_as_str}.csv"'
        },
    )
    # UTF-8 BOM so spreadsheet apps detect the encoding.
    response.write(u'\ufeff'.encode('utf8'))
    writer = csv.writer(response, delimiter=';')
    writer.writerow([
        'Bestellnummer', 'Vorname', 'Name', 'Straße', 'PLZ', 'Ort', 'Land',
        'Artikelnummer', 'Artikelname', 'Preis (Brutto)', 'Menge',
        'Positionstyp', 'Anmerkung', 'EMAIL'
    ])
    orderitems = (OrderItem.objects.filter(fulfillment_status='PROCESSABLE').
                  select_related('order__delivery_address').order_by(
                      'order__marketplace_order_number'))
    print(f'Found {len(orderitems)} orderitems')
    # current_order / current_order_id track the previous iteration's order so
    # the shipping row can be flushed when the order number changes.
    current_order = None
    current_order_id = None
    for oi in orderitems:
        # Append shipping information at the end of an order (after all orderitems)
        if current_order_id and current_order_id != oi.order.marketplace_order_number:
            # Price formatted with a decimal comma (German locale).
            price = '%0.2f' % current_order.delivery_fee[0][
                'deliveryFeeAmount']['amount']
            price = price.replace('.', ',')
            writer.writerow([
                current_order.marketplace_order_number,
                current_order.delivery_address.first_name,
                current_order.delivery_address.last_name,
                f'{current_order.delivery_address.street} {current_order.delivery_address.house_number}',
                current_order.delivery_address.zip_code,
                current_order.delivery_address.city,
                _get_country_information(
                    current_order.delivery_address.country_code), ' ',
                _get_delivery_info(current_order.delivery_fee[0]['name']),
                price, 1, 'Versandposition',
                f'OTTO {current_order.marketplace_order_id}',
                '*****@*****.**'
            ])
        price = '%0.2f' % round(oi.price_in_cent / 100, 2)
        price = price.replace('.', ',')
        writer.writerow([
            oi.order.marketplace_order_number,
            oi.order.delivery_address.first_name,
            oi.order.delivery_address.last_name,
            f'{oi.order.delivery_address.street} {oi.order.delivery_address.house_number}',
            oi.order.delivery_address.zip_code,
            oi.order.delivery_address.city,
            _get_country_information(oi.order.delivery_address.country_code),
            oi.sku,
            oi.product_title,
            price, 1, 'Artikel',
            f'OTTO {oi.order.marketplace_order_id}',
            '*****@*****.**'
        ])
        current_order_id = oi.order.marketplace_order_number
        current_order = deepcopy(oi.order)
    if current_order:
        # Flush the shipping position for the final order in the loop.
        price = '%0.2f' % current_order.delivery_fee[0]['deliveryFeeAmount'][
            'amount']
        price = price.replace('.', ',')
        writer.writerow([
            current_order.marketplace_order_number,
            current_order.delivery_address.first_name,
            current_order.delivery_address.last_name,
            f'{current_order.delivery_address.street} {current_order.delivery_address.house_number}',
            current_order.delivery_address.zip_code,
            current_order.delivery_address.city,
            _get_country_information(
                current_order.delivery_address.country_code), ' ',
            _get_delivery_info(current_order.delivery_fee[0]['name']),
            price, 1, 'Versandposition',
            f'OTTO {current_order.marketplace_order_id}',
            '*****@*****.**'
        ])
    email = request.GET.get('email')
    if email:
        LOG.info('Houston we got to send an email')
        # Both settings must be present; otherwise return the CSV without
        # sending mail.
        if not settings.FROM_EMAIL_ADDRESS:
            LOG.error('settings.FROM_EMAIL_ADDRESS needs to be defined')
            return response
        if not settings.OTTO_ORDER_CSV_RECEIVER_LIST:
            LOG.error(
                'settings.OTTO_ORDER_CSV_RECEIVER_LIST needs to be defined')
            return response
        LOG.info(f'settings.FROM_EMAIL_ADDRESS: {settings.FROM_EMAIL_ADDRESS}')
        LOG.info(
            f'settings.OTTO_ORDER_CSV_RECEIVER_LIST: {settings.OTTO_ORDER_CSV_RECEIVER_LIST}'
        )
        message = EmailMessage(
            f'OTTO Bestellungen - {now.strftime("%Y/%m/%d")}',
            'OTTO Bestellungen als csv - Frohes Schaffen!!',
            settings.FROM_EMAIL_ADDRESS,
            settings.OTTO_ORDER_CSV_RECEIVER_LIST,
        )
        message.attach(f'{now.strftime("%Y/%m/%d")}_otto_bestellungen.csv',
                       response.getvalue(), 'text/csv')
        number_of_messages = message.send()
        LOG.info(f'{number_of_messages} send')
    return response
def to_representation(self, obj):
    """Serialize *obj* as a dict carrying the rendered create-view markup.

    The template output is routed through an HttpResponse so the returned
    ``'html'`` value is the response payload (bytes), exactly as Django
    would send it.
    """
    template = loader.get_template("views/create.html")
    rendered = template.render()
    wrapped = HttpResponse(rendered)
    return {'html': wrapped.getvalue()}
def api_report(
    self,
    request,
    from_date,
    to_date,
    o_format,
    administrative_domain=None,
    columns=None,
    source="both",
    alarm_class=None,
    enable_autowidth=False,
):
    """Build the "alarm comments" report and return it as an HTTP download.

    Gathers per-alarm log entries (only those carrying a ``log.source``)
    from both the ActiveAlarm and ArchivedAlarm Mongo collections within
    the requested date window and renders them as CSV, zipped CSV or XLSX.

    :param request: web request; ``request.user`` drives the access checks
    :param from_date: window start, a "%d.%m.%Y" string
    :param to_date: window end, a "%d.%m.%Y" string; one day is added so
        the end date itself is fully included
    :param o_format: output format — "csv", "csv_zip" or "xlsx"
    :param administrative_domain: optional administrative-domain id
        (digit string) to restrict the report to
    :param columns: optional comma-separated subset of ``cols`` to emit
    :param source: unused in this body — presumably kept for signature
        compatibility with sibling reports; TODO confirm
    :param alarm_class: unused in this body — presumably kept for signature
        compatibility with sibling reports; TODO confirm
    :param enable_autowidth: xlsx only — widen columns to the widest value
    :returns: HttpResponse carrying the rendered attachment
    """

    def row(row):
        # Normalize a single report row: None -> "", datetimes -> formatted
        # text, everything else coerced to text via smart_text().
        def qe(v):
            if v is None:
                return ""
            if isinstance(v, str):
                return smart_text(v)
            elif isinstance(v, datetime.datetime):
                return v.strftime("%Y-%m-%d %H:%M:%S")
            elif not isinstance(v, str):
                return smart_text(v)
            else:
                # Unreachable: every str was consumed by the first branch.
                return v

        return [qe(x) for x in row]

    def translate_row(row, cmap):
        # Keep only the requested columns, in the requested order.
        return [row[i] for i in cmap]

    # Machine names of every column this report can produce.
    cols = [
        "id",
        "alarm_class",
        "alarm_from_ts",
        "alarm_to_ts",
        "alarm_tt",
        "object_name",
        "object_address",
        "object_admdomain",
        "log_timestamp",
        "log_source",
        "log_message",
        # "tt",
        # "escalation_ts",
    ]
    # Localized header captions, index-aligned with `cols`.
    header_row = [
        "ID",
        _("ALARM_CLASS"),
        _("ALARM_FROM_TS"),
        _("ALARM_TO_TS"),
        _("ALARM_TT"),
        _("OBJECT_NAME"),
        _("OBJECT_ADDRESS"),
        _("OBJECT_ADMDOMAIN"),
        _("LOG_TIMESTAMP"),
        _("LOG_SOURCE"),
        _("LOG_MESSAGE"),
    ]
    if columns:
        # Map requested column names to indexes; unknown names are
        # silently skipped.
        cmap = []
        for c in columns.split(","):
            try:
                cmap += [cols.index(c)]
            except ValueError:
                continue
    else:
        cmap = list(range(len(cols)))
    # First report row is the (column-filtered) header.
    r = [translate_row(header_row, cmap)]
    # Add one day so alarms timestamped anywhere on to_date are included.
    fd = datetime.datetime.strptime(
        to_date, "%d.%m.%Y") + datetime.timedelta(days=1)
    match = {
        "timestamp": {
            "$gte": datetime.datetime.strptime(from_date, "%d.%m.%Y"),
            "$lte": fd
        }
    }
    mos = ManagedObject.objects.filter()
    ads = []
    if administrative_domain:
        if administrative_domain.isdigit():
            # Expand the domain id to itself plus all nested domains.
            administrative_domain = [int(administrative_domain)]
            ads = AdministrativeDomain.get_nested_ids(
                administrative_domain[0])
    if not request.user.is_superuser:
        # Non-superusers may only report on domains they can access.
        user_ads = UserAccess.get_domains(request.user)
        if administrative_domain and ads:
            if administrative_domain[0] not in user_ads:
                ads = list(set(ads) & set(user_ads))
                if not ads:
                    return HttpResponse(
                        "<html><body>Permission denied: Invalid Administrative Domain</html></body>"
                    )
        else:
            ads = user_ads
    if ads:
        mos = mos.filter(administrative_domain__in=ads)
    # Working if Administrative domain set
    if ads:
        # NOTE(review): a plain dict assignment cannot raise
        # bson.errors.InvalidId — this try/except looks vestigial.
        try:
            match["adm_path"] = {"$in": ads}
            # @todo More 2 level hierarhy
        except bson.errors.InvalidId:
            pass
    # id -> (name, address) lookup for the selected managed objects.
    addr_map = {
        mo[0]: (mo[1], mo[2])
        for mo in mos.values_list("id", "name", "address")
    }
    # Active Alarms
    coll = ActiveAlarm._get_collection()
    for aa in coll.aggregate([
            {
                "$match": match
            },
            # One output document per log entry.
            {
                "$unwind": "$log"
            },
            # Keep only log entries with a non-null source.
            {
                "$match": {
                    "log.source": {
                        "$exists": True,
                        "$ne": None
                    }
                }
            },
            {
                "$project": {
                    "timestamp": 1,
                    "managed_object": 1,
                    "alarm_class": 1,
                    "escalation_tt": 1,
                    "adm_path": 1,
                    "log": 1,
                }
            },
            {
                "$sort": {
                    "_id": 1,
                    "log.timestamp": 1
                }
            },
    ]):
        r += [
            translate_row(
                row([
                    smart_text(aa["_id"]),
                    AlarmClass.get_by_id(aa["alarm_class"]).name,
                    aa["timestamp"],
                    "",  # active alarms have no clear timestamp yet
                    aa.get("escalation_tt", ""),
                    addr_map[aa["managed_object"]][0],
                    addr_map[aa["managed_object"]][1],
                    AdministrativeDomain.get_by_id(
                        aa["adm_path"][-1]).name,
                    aa["log"]["timestamp"],
                    aa["log"]["source"],
                    aa["log"]["message"],
                ]),
                cmap,
            )
        ]
    # Archived Alarms (same pipeline, plus clear_timestamp)
    coll = ArchivedAlarm._get_collection()
    for aa in coll.aggregate([
            {
                "$match": match
            },
            {
                "$unwind": "$log"
            },
            # NOTE(review): unlike the active pass, no "$ne": None filter
            # here — null sources pass through; TODO confirm intentional.
            {
                "$match": {
                    "log.source": {
                        "$exists": True
                    }
                }
            },
            {
                "$project": {
                    "timestamp": 1,
                    "clear_timestamp": 1,
                    "managed_object": 1,
                    "alarm_class": 1,
                    "escalation_tt": 1,
                    "adm_path": 1,
                    "log": 1,
                }
            },
            {
                "$sort": {
                    "_id": 1,
                    "log.timestamp": 1
                }
            },
    ]):
        r += [
            translate_row(
                row([
                    smart_text(aa["_id"]),
                    AlarmClass.get_by_id(aa["alarm_class"]).name,
                    aa["timestamp"],
                    aa["clear_timestamp"],
                    aa.get("escalation_tt", ""),
                    addr_map[aa["managed_object"]][0],
                    addr_map[aa["managed_object"]][1],
                    AdministrativeDomain.get_by_id(
                        aa["adm_path"][-1]).name,
                    aa["log"]["timestamp"],
                    aa["log"]["source"],
                    aa["log"]["message"],
                ]),
                cmap,
            )
        ]
    filename = "alarm_comments.csv"
    if o_format == "csv":
        # Stream the CSV straight into the response body.
        response = HttpResponse(content_type="text/csv")
        response[
            "Content-Disposition"] = 'attachment; filename="%s"' % filename
        writer = csv.writer(response)
        writer.writerows(r)
        return response
    elif o_format == "csv_zip":
        # Write CSV to a temp file, then zip it into an in-memory buffer.
        response = BytesIO()
        f = TextIOWrapper(TemporaryFile(mode="w+b"), encoding="utf-8")
        writer = csv.writer(f, dialect="excel", delimiter=";",
                            quotechar='"')
        # NOTE(review): `columns` is a comma-separated *string* (or None),
        # so csv spells it out one character per cell here; the parsed
        # header already sits in r[0]. Looks like a bug — TODO confirm.
        writer.writerow(columns)
        writer.writerows(r)
        f.seek(0)
        with ZipFile(response, "w", compression=ZIP_DEFLATED) as zf:
            zf.writestr(filename, f.read())
            zf.filename = "%s.zip" % filename
        # response = HttpResponse(content_type="text/csv")
        response.seek(0)
        response = HttpResponse(response.getvalue(),
                                content_type="application/zip")
        response[
            "Content-Disposition"] = 'attachment; filename="%s.zip"' % filename
        return response
    elif o_format == "xlsx":
        # Render the rows into an in-memory xlsx workbook.
        response = BytesIO()
        wb = xlsxwriter.Workbook(response)
        cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
        ws = wb.add_worksheet("Alarms")
        # Track the widest data cell per header caption for autowidth.
        max_column_data_length = {}
        for rn, x in enumerate(r):
            for cn, c in enumerate(x):
                if rn and (r[0][cn] not in max_column_data_length
                           or len(str(c)) > max_column_data_length[r[0][cn]]):
                    max_column_data_length[r[0][cn]] = len(str(c))
                ws.write(rn, cn, c, cf1)
        # rn/cn still hold the last row/column indexes after the loops.
        ws.autofilter(0, 0, rn, cn)
        ws.freeze_panes(1, 0)
        for cn, c in enumerate(r[0]):
            # Set column width
            width = get_column_width(c)
            if enable_autowidth and width < max_column_data_length[c]:
                width = max_column_data_length[c]
            ws.set_column(cn, cn, width=width)
        wb.close()
        response.seek(0)
        response = HttpResponse(response.getvalue(),
                                content_type="application/vnd.ms-excel")
        response[
            "Content-Disposition"] = 'attachment; filename="alarm_comments.xlsx"'
        response.close()
        return response