def post(cls, request, *args, **kwargs):
    # Handle POST actions for the SQL-report editor.  Exactly one of the
    # keys "format", "save", "delete" or "test" in request.POST selects the
    # action; anything else falls through to the final error response.
    # Allow only post from superusers
    if not request.user.is_superuser:
        # NOTE(review): HttpResponseNotAllowed's first argument is the list of
        # *permitted* methods; lowercase "post" is kept verbatim here.
        return HttpResponseNotAllowed(
            ["post"], content="Only a superuser can execute SQL statements"
        )
    # NOTE(review): request.is_ajax() was deprecated in Django 3.1 and removed
    # in 4.0 — confirm the Django version this runs on.
    if request.method != "POST" or not request.is_ajax():
        return HttpResponseForbidden("<h1>%s</h1>" % _("Permission denied"))
    if "format" in request.POST:
        # Pretty-print the submitted SQL with sqlparse and return it as JSON.
        formatted = sqlparse.format(
            request.POST.get("sql", ""),
            keyword_case="lower",
            identifier_case="lower",
            strip_comments=False,
            reindent=True,
            wrap_after=50,
            indent_tabs=False,
            indent_width=2,
        )
        return JsonResponse({"formatted": formatted})
    elif "save" in request.POST:
        # Create a new report, or update an existing one owned by this user.
        if "id" in request.POST:
            m = SQLReport.objects.using(request.database).get(pk=request.POST["id"])
            # Only the owner may overwrite an existing report.
            if m.user.id != request.user.id:
                return HttpResponseForbidden("You're not the owner of this report")
            f = SQLReportForm(request.POST, instance=m)
        else:
            f = SQLReportForm(request.POST)
        if f.is_valid():
            try:
                m = f.save(commit=False)
                # Force ownership to the requesting user regardless of form data.
                m.user = request.user
                m.save()
                return JsonResponse({"id": m.id, "status": "ok"})
            except Exception as e:
                logger.error("Could not save report: %s" % e)
                return JsonResponse({"id": m.id, "status": "could not save report"})
        else:
            return HttpResponseServerError("Error saving report")
    elif "delete" in request.POST:
        # Delete is filtered on user as well as pk, so a user can only
        # delete their own reports; deleting someone else's is a silent no-op.
        pk = request.POST["id"]
        SQLReport.objects.using(request.database).filter(
            pk=pk, user=request.user
        ).delete()
        messages.add_message(
            request,
            messages.INFO,
            _('The %(name)s "%(obj)s" was deleted successfully.')
            % {"name": _("my report"), "obj": pk},
        )
        return HttpResponse("ok")
    elif "test" in request.POST:
        # Execute the SQL and stream the result rows back as JSON.
        return StreamingHttpResponse(
            content_type="application/json; charset=%s" % settings.DEFAULT_CHARSET,
            streaming_content=cls._generate_json_data(
                database=request.database, sql=request.POST["sql"]
            ),
        )
    else:
        # NOTE(review): HttpResponseNotAllowed expects an iterable of permitted
        # method names; passing a message string iterates it per character.
        # Kept verbatim — likely should be an HttpResponseBadRequest.
        return HttpResponseNotAllowed("Unknown post request")
def inline_discussion(request, course_key, discussion_id):
    """
    Renders JSON for DiscussionModules

    Loads the course (enrollment required), fetches the threads for one
    inline discussion, annotates them with per-user metadata, applies
    divided-discussion group handling, and returns the combined payload
    as JSON.  Each phase is wrapped in function_trace() for profiling.
    """
    with function_trace('get_course_and_user_info'):
        course = get_course_with_access(request.user, 'load', course_key, check_if_enrolled=True)
        cc_user = cc.User.from_django_user(request.user)
        user_info = cc_user.to_dict()
    try:
        with function_trace('get_threads'):
            threads, query_params = get_threads(
                request, course, user_info, discussion_id, per_page=INLINE_THREADS_PER_PAGE)
    except ValueError:
        # get_threads raises ValueError for an invalid group_id in the request.
        return HttpResponseServerError('Invalid group_id')
    with function_trace('get_metadata_for_threads'):
        annotated_content_info = utils.get_metadata_for_threads(
            course_key, threads, request.user, user_info)
    with function_trace('determine_group_permissions'):
        is_staff = has_permission(request.user, 'openclose_thread', course.id)
        course_discussion_settings = get_course_discussion_settings(course.id)
        group_names_by_id = get_group_names_by_id(course_discussion_settings)
        # Divided (cohorted) discussions apply when any division scheme is set.
        course_is_divided = course_discussion_settings.division_scheme is not CourseDiscussionSettings.NONE
    with function_trace('prepare_content'):
        threads = [
            utils.prepare_content(thread, course_key, is_staff, course_is_divided, group_names_by_id)
            for thread in threads
        ]
    return utils.JsonResponse({
        'is_commentable_divided': is_commentable_divided(course_key, discussion_id),
        'discussion_data': threads,
        'user_info': user_info,
        'user_group_id': get_group_id_for_user(request.user, course_discussion_settings),
        'annotated_content_info': annotated_content_info,
        'page': query_params['page'],
        'num_pages': query_params['num_pages'],
        'roles': utils.get_role_ids(course_key),
        'course_settings': make_course_settings(course, request.user, False)
    })
def render_500(request):
    """Render the static server-error template as an HTTP 500 response."""
    body = render_to_string('static_templates/server-error.html', {}, request=request)
    return HttpResponseServerError(body)
def thumbnail(request, lccn, date, edition, sequence):
    # Serve a thumbnail for one newspaper page, identified by LCCN, issue
    # date, edition and page sequence number.
    page = get_page(lccn, date, edition, sequence)
    try:
        # Resize the page image to the configured thumbnail width.
        im = _get_resized_image(page, settings.THUMBNAIL_WIDTH)
    except IOError, e:  # Python 2 except syntax — this module predates py3
        return HttpResponseServerError("Unable to create thumbnail: %s" % e)
    # NOTE(review): the success path (returning the resized image) is not
    # visible in this chunk — presumably the function continues below.
except Exception, e: logger.exception(e) return HttpResponseServerError() # Must have received a message logger.debug("@@Got a text message") try: from_address = s['from']['id'] text = s['initialText'] logger.debug("@@Received message from %s: %s" % (from_address, text)) # pass the message to RapidSMS incoming(backend_name, from_address, text) # Respond nicely to Tropo t = Tropo() t.hangup() logger.debug("@@responding to tropo with hangup") return HttpResponse(t.RenderJson()) except Exception, e: logger.exception(e) logger.debug("@@responding to tropo with error") return HttpResponseServerError() else: # What? We don't expect any GET to our URL because # our Tropo app should be a Web API app. logger.error("@@Unexpected GET to tropo URL") return HttpResponseServerError()
def instructor_dashboard_2(request, course_id):
    """
    Display the instructor dashboard for a course.

    Builds the list of dashboard sections the requesting user may see,
    gated by role (staff/instructor/admin/finance/sales) and by feature
    flags, then renders the dashboard template.  Raises Http404 for
    non-staff users; returns a 500 for an unparseable course id.
    """
    try:
        course_key = CourseKey.from_string(course_id)
    except InvalidKeyError:
        log.error(u"Unable to find course with course key %s while loading the Instructor Dashboard.", course_id)
        return HttpResponseServerError()
    course = get_course_by_id(course_key, depth=0)
    # Role flags used below to gate individual dashboard sections.
    access = {
        'admin': request.user.is_staff,
        'instructor': bool(has_access(request.user, 'instructor', course)),
        'finance_admin': CourseFinanceAdminRole(course_key).has_user(request.user),
        'sales_admin': CourseSalesAdminRole(course_key).has_user(request.user),
        'staff': bool(has_access(request.user, 'staff', course)),
        'forum_admin': has_forum_access(request.user, course_key, FORUM_ROLE_ADMINISTRATOR),
    }
    if not access['staff']:
        raise Http404()
    is_white_label = CourseMode.is_white_label(course_key)
    # Sections every staff member sees; more are appended conditionally below.
    sections = [
        _section_course_info(course, access),
        _section_membership(course, access, is_white_label),
        _section_cohort_management(course, access),
        _section_student_admin(course, access),
        _section_data_download(course, access),
    ]
    analytics_dashboard_message = None
    if settings.ANALYTICS_DASHBOARD_URL:
        # Construct a URL to the external analytics dashboard
        analytics_dashboard_url = '{0}/courses/{1}'.format(settings.ANALYTICS_DASHBOARD_URL, unicode(course_key))
        link_start = "<a href=\"{}\" target=\"_blank\">".format(analytics_dashboard_url)
        analytics_dashboard_message = _(
            "To gain insights into student enrollment and participation {link_start}"
            "visit {analytics_dashboard_name}, our new course analytics product{link_end}."
        )
        analytics_dashboard_message = analytics_dashboard_message.format(
            link_start=link_start, link_end="</a>",
            analytics_dashboard_name=settings.ANALYTICS_DASHBOARD_NAME)
        # Temporarily show the "Analytics" section until we have a better way of linking to Insights
        sections.append(_section_analytics(course, access))
    # Check if there is corresponding entry in the CourseMode Table related to the Instructor Dashboard course
    course_mode_has_price = False
    paid_modes = CourseMode.paid_modes_for_course(course_key)
    if len(paid_modes) == 1:
        course_mode_has_price = True
    elif len(paid_modes) > 1:
        log.error(
            u"Course %s has %s course modes with payment options. Course must only have "
            u"one paid course mode to enable eCommerce options.",
            unicode(course_key), len(paid_modes)
        )
    if settings.FEATURES.get('INDIVIDUAL_DUE_DATES') and access['instructor']:
        sections.insert(3, _section_extensions(course))
    # Gate access to course email by feature flag & by course-specific authorization
    if bulk_email_is_enabled_for_course(course_key):
        sections.append(_section_send_email(course, access))
    # Gate access to Metrics tab by feature flag and staff authorization
    if settings.FEATURES['CLASS_DASHBOARD'] and access['staff']:
        sections.append(_section_metrics(course, access))
    # Gate access to Ecommerce tab
    if course_mode_has_price and (access['finance_admin'] or access['sales_admin']):
        sections.append(_section_e_commerce(course, access, paid_modes[0], is_white_label, is_white_label))
    # Gate access to Special Exam tab depending if either timed exams or proctored exams
    # are enabled in the course
    # NOTE: For now, if we only have proctored exams enabled, then only platform Staff
    # (user.is_staff) will be able to view the special exams tab. This may
    # change in the future
    can_see_special_exams = (
        ((course.enable_proctored_exams and request.user.is_staff) or course.enable_timed_exams)
        and settings.FEATURES.get('ENABLE_SPECIAL_EXAMS', False)
    )
    if can_see_special_exams:
        sections.append(_section_special_exams(course, access))
    # Certificates panel
    # This is used to generate example certificates
    # and enable self-generated certificates for a course.
    certs_enabled = CertificateGenerationConfiguration.current().enabled
    if certs_enabled and access['admin']:
        sections.append(_section_certificates(course))
    disable_buttons = not _is_small_course(course_key)
    certificate_white_list = CertificateWhitelist.get_certificate_white_list(course_key)
    generate_certificate_exceptions_url = reverse(  # pylint: disable=invalid-name
        'generate_certificate_exceptions',
        kwargs={'course_id': unicode(course_key), 'generate_for': ''}
    )
    certificate_exception_view_url = reverse(
        'certificate_exception_view',
        kwargs={'course_id': unicode(course_key)}
    )
    context = {
        'course': course,
        'old_dashboard_url': reverse('instructor_dashboard_legacy', kwargs={'course_id': unicode(course_key)}),
        'studio_url': get_studio_url(course, 'course'),
        'sections': sections,
        'disable_buttons': disable_buttons,
        'analytics_dashboard_message': analytics_dashboard_message,
        'certificate_white_list': certificate_white_list,
        'generate_certificate_exceptions_url': generate_certificate_exceptions_url,
        'certificate_exception_view_url': certificate_exception_view_url
    }
    return render_to_response('instructor/instructor_dashboard_2/instructor_dashboard_2.html', context)
def handle_disconnect(request, workflow_id=None):
    """
    Handle User Disconnection
    If we are here, mod_vulture has already delete the application session in redis
    According to the configuration of App, this handler will:
     - Display a "Logout Message"
     - Destroy the portal session
     - Redirect to the application (ie: display the Login portal)

    :param request: Django request object
    :param workflow_id: primary key of the Workflow to disconnect from
    :returns: Self-service portal
    """
    global_config = Cluster.get_global_config()

    """ Try to find the application with the requested URI """
    try:
        workflow = Workflow.objects.get(pk=workflow_id)
    except:  # NOTE(review): bare except also swallows unrelated errors
        logger.error(
            "DISCONNECT::handle_disconnect: Unable to find workflow having id '{}'"
            .format(workflow_id))
        return HttpResponseForbidden("Invalid Workflow.")

    """ Get portal_cookie name from cluster """
    portal_cookie_name = global_config.portal_cookie_name

    """ Get portal cookie value (if exists) """
    portal_cookie = request.COOKIES.get(portal_cookie_name, None)
    if portal_cookie:
        logger.debug(
            "DISCONNECT::handle_disconnect: portal_cookie Found: {}".format(
                portal_cookie))
    else:
        # Without the portal cookie we cannot identify the session — deny.
        logger.error(
            "DISCONNECT::handle_disconnect: portal_cookie not found !")
        return HttpResponseForbidden("Access Denied.")

    """ Connect to Redis """
    r = REDISBase()
    if not r:
        logger.info("PORTAL::self: Unable to connect to REDIS !")
        return HttpResponseServerError()
    portal_session = REDISPortalSession(r, portal_cookie)

    """ The user do not have a portal session: Access is forbidden """
    if not portal_session.exists():
        return HttpResponseForbidden("Invalid session.")

    # FIXME
    """ Destroy portal session if needed """
    if workflow.app_disconnect_portal:
        logger.info(
            "DISCONNECT::handle_disconnect: portal session '{}' has been destroyed"
            .format(portal_cookie))
        portal_session.destroy()

    # FIXME
    """ Display Logout message if needed (otherwise redirect to application) """
    if workflow.app_display_logout_message:
        template = workflow.template
        # Build the <link> tag for the per-template stylesheet served under
        # the cluster's public token.
        style = '<link rel="stylesheet" type="text/css" href="/' + str(
            global_config.public_token) + '/templates/portal_%s.css">' % (str(
                template.id))
        logger.debug(
            "DISCONNECT::handle_disconnect: Display template '{}'".format(
                template.name))
        return render_to_response("portal_%s_html_logout.conf" % (str(template.id)), {
            'style': style,
            'app_url': workflow.get_redirect_uri()
        }, context_instance=RequestContext(request))
    else:
        logger.debug(
            "DISCONNECT::handle_disconnect: Redirecting to redirect_uri '{}'".
            format(workflow.get_redirect_uri()))
        return HttpResponseRedirect(workflow.get_redirect_uri())
def custom_500(request):
    """Custom 500 error handler.

    Renders the ``500.html`` template and returns it with HTTP status 500.

    Fix: the original wrapped the ``HttpResponse`` produced by ``render()``
    inside ``HttpResponseServerError``, which treats the response *object*
    as body content instead of the rendered HTML.  ``render()`` accepts a
    ``status`` argument for exactly this purpose.
    """
    return render(request, '500.html', status=500)
def message_received(request, backend_name): """Handle HTTP requests from Tropo. """ #logger.debug("@@Got request from Tropo: %s" % request) if request.method == 'POST': logger.debug("@@ Raw data: %s" % request.raw_post_data) try: post = json.loads(request.raw_post_data) except Exception, e: logger.exception(e) logger.debug("EXCEPTION decoding post data") return HttpResponseServerError() logger.debug("@@ Decoded data: %r" % post) if 'result' in post: session_id = post['result']['sessionId'] elif 'session' in post: session_id = post['session']['id'] else: logger.error( "@@HEY, post is neither result nor session, what's going on?") return HttpResponseServerError() # Do we need to pass this to somebody else? if 'result' in post: logger.debug( "@@ results? we don't expect results, only callback users ought to be getting results. Return error." ) return HttpResponseServerError() s = post['session'] if 'parameters' in s: parms = s['parameters'] logger.debug("@@ got session") if 'callback_url' in parms: url = parms['callback_url'] view, args, kwargs = resolve(url) kwargs['request'] = request logger.debug("@@ passing tropo request to %s" % url) try: return view(*args, **kwargs) except Exception, e: logger.error("@@Caught exception calling callback:") logger.exception(e) return HttpResponseServerError() # Did we call Tropo so we could send a text message? if 'numberToDial' in parms: # Construct a JSON response telling Tropo to do that logger.debug("@@Telling Tropo to send message") try: j = json.dumps({ "tropo": [ { 'message': { 'say': { 'value': parms['msg'] }, 'to': parms['numberToDial'], 'from': parms['callerID'], 'channel': 'TEXT', 'network': 'SMS' } }, ] }) logger.debug("@@%s" % j) return HttpResponse(j) except Exception, e: logger.exception(e) return HttpResponseServerError()
def exception_traceback(request, e, template_name, extra_context=None):
    """Return an HTTP 500 response whose body is a rendered traceback page.

    :param request: the Django request being handled when ``e`` was raised
    :param e: the exception to report
    :param template_name: template used to render the traceback page
    :param extra_context: optional extra template context (a new empty dict
        is used when omitted)

    Fix: the original declared ``extra_context={}`` — a mutable default
    argument shared across calls.  Using a ``None`` sentinel is
    backward-compatible and removes the shared-state hazard.
    """
    if extra_context is None:
        extra_context = {}
    return HttpResponseServerError(
        exception_traceback_string(request, e, template_name, extra_context))
def accept(self, request):
    '''
    Accept a request conforming to Authorization Code Flow
    OIDC Core 1.0 section 3.1.2.5
    (http://openid.net/specs/openid-connect-core-1_0.html#AuthResponse)

    request is a django.http.HttpRequest

    Returns an HttpResponse, with corresponding values filled in for client

    Fixes relative to the original:
    - ``msg + ':' + token_response`` concatenated a str with the response
      object and would raise TypeError; the response is now stringified.
    - ``logging.warn`` (deprecated alias) replaced by ``logging.warning``.
    '''
    # Both 'state' and 'code' are required by the Authorization Code Flow.
    state = request.GET.get('state')
    code = request.GET.get('code')
    if not code:
        return HttpResponseBadRequest(
            'callback did not contain an authorization code')
    if not state:
        return HttpResponseBadRequest(
            'callback state did not match expected')
    # Look up the pending authorization we created when login started.
    w = get_pending_by_state(state)
    if not w:
        return HttpResponseBadRequest(
            'callback request from login is malformed, or authorization session expired'
        )
    else:
        logging.debug(
            'accepted request maps to pending callback object: %s', vars(w))
    # Reject callbacks that arrive after the configured expiration window.
    if now() > w.creation_time + datetime.timedelta(
            seconds=Config['url_expiration_timeout']):
        logging.warning('authorization url has expired object: %s', vars(w))
        return HttpResponseBadRequest(
            'This authorization url has expired, please retry')
    # Exchange the authorization code for a token at the provider.
    provider = w.provider
    token_endpoint = get_provider_config(provider, 'token_endpoint')
    client_id = Config['providers'][provider]['client_id']
    client_secret = Config['providers'][provider]['client_secret']
    redirect_uri = Config['redirect_uri']
    token_response = self._token_request(token_endpoint, client_id,
                                         client_secret, code, redirect_uri)
    if token_response.status_code not in [200, 302]:
        return HttpResponseServerError(
            'could not acquire token from provider' +
            str(vars(token_response)))
    # Pick the provider-specific handler for the token response.
    if provider == PROVIDER_GLOBUS:
        handler = GlobusRedirectHandler()
    elif provider == PROVIDER_AUTH0:
        handler = Auth0RedirectHandler()
    else:
        handler = self
    (success, msg, user, token,
     nonce) = handler._handle_token_response(w, token_response)
    if not success:
        # str() — concatenating the response object itself raises TypeError.
        return HttpResponseServerError(msg + ':' + str(token_response))
    if w.return_to:
        ret = HttpResponseRedirect(
            build_redirect_url(w.return_to, token))
    else:
        ret = HttpResponse(
            'Successfully authenticated user. Browser tab can be safely closed.'
        )
    # The pending-callback record is single-use; remove it on success.
    w.delete()
    return ret
idUser=user, idResource=gadget) userVote.vote = vote userVote.save() except Exception, ex: log(ex, request) return HttpResponseServerError( get_xml_error(unicode(ex)), mimetype='application/xml; charset=UTF-8') try: update_gadget_popularity(gadget) except Exception, ex: log(ex, request) return HttpResponseServerError( get_xml_error(unicode(ex)), mimetype='application/xml; charset=UTF-8') return get_vote_response(gadget, user, format) class GadgetVersionsCollection(Resource): def create(self, request, user_name): gadgets = simplejson.loads(request.POST["gadgets"]) result = [] for g in gadgets: version = get_last_gadget_version(g["name"], g["vendor"]) if version: # the gadget is still in the catalogue g["lastVersion"] = version url = GadgetResource.objects.get(short_name=g["name"], vendor=g["vendor"],
# NOTE(review): the statement below is the tail of a preceding view whose
# definition starts before this chunk; kept verbatim.
    return TemplateResponse(req, 'zato/service/invoker.html', return_data)


@method_allowed('POST')
def invoke(req, name, cluster_id):
    """ Executes a service directly, even if it isn't exposed through any channel.
    """
    # Python 2 syntax (``except Exception, e``) throughout.
    try:
        # Collect the invocation parameters from the POST body.
        input_dict = {}
        for attr in ('payload', 'data_format', 'transport'):
            input_dict[attr] = req.POST.get(attr, '')
        # Ask the client to JSON-decode the response only for JSON payloads.
        input_dict['to_json'] = True if input_dict.get(
            'data_format') == DATA_FORMAT.JSON else False
        response = req.zato.client.invoke(name, **input_dict)
    except Exception, e:
        msg = 'Could not invoke the service. name:[{}], cluster_id:[{}], e:[{}]'.format(
            name, cluster_id, format_exc(e))
        logger.error(msg)
        return HttpResponseServerError(msg)
    else:
        try:
            if response.ok:
                return HttpResponse(response.inner_service_response or '(None)')
            else:
                return HttpResponseServerError(response.details)
        except Exception, e:
            # Accessing response attributes may itself fail; report that too.
            return HttpResponseServerError(format_exc(e))
def page_detail(request, page_pid, book_pid=None):
    """Render the detail page for one scanned book page.

    Fetches book- and page-level JSON from the BDR API, then fetches and
    parses the MODS XML of each annotation attached to the page.  Returns
    404 if no book can be found for the page, 500 on BDR API errors.
    """
    #note: page_pid does not include 'bdr:'
    template = loader.get_template('rome_templates/page_detail.html')
    context = std_context()
    if not book_pid:
        book_pid = _get_book_pid_from_page_pid(u'bdr:%s' % page_pid)
        if not book_pid:
            return HttpResponseNotFound('Book for this page not found.')
    book_list_page = request.GET.get('book_list_page', None)
    context['book_mode'] = 1
    context['print_mode'] = 0
    # Preserve the book-list pagination in the back links when present.
    if book_list_page:
        context['back_to_book_href'] = u'%s?page=%s' % (reverse('books'), book_list_page)
        context['back_to_thumbnail_href'] = u'%s?book_list_page=%s' % (reverse(
            'thumbnail_viewer', kwargs={'book_pid': book_pid}), book_list_page)
    else:
        context['back_to_book_href'] = reverse('books')
        context['back_to_thumbnail_href'] = reverse(
            'thumbnail_viewer', kwargs={'book_pid': book_pid})
    context['pid'] = book_pid
    thumbnails = []
    # Book-level metadata from the BDR API.
    book_json_uri = u'https://%s/api/pub/items/bdr:%s/' % (BDR_SERVER, book_pid)
    r = requests.get(book_json_uri, timeout=60)
    if not r.ok:
        logger.error(u'TTWR - error retrieving url %s' % book_json_uri)
        logger.error(u'TTWR - response: %s - %s' % (r.status_code, r.text))
        return HttpResponseServerError('Error retrieving content.')
    book_json = json.loads(r.text)
    context['short_title'] = book_json['brief']['title']
    context['title'] = _get_full_title(book_json)
    try:
        # Join contributors with "; " separators.
        author_list = book_json['contributor_display']
        authors = ""
        for i in range(len(author_list)):
            if i == len(author_list) - 1:
                authors += author_list[i]
            else:
                authors += author_list[i] + "; "
        context['authors'] = authors
    except:  # NOTE(review): bare except — missing key falls back to placeholder
        context['authors'] = "contributor(s) not available"
    try:
        # Year only (first four characters of the date string).
        context['date'] = book_json['dateIssued'][0:4]
    except:
        try:
            context['date'] = book_json['dateCreated'][0:4]
        except:
            context['date'] = "n.d."
    context[
        'lowres_url'] = "https://%s/fedora/objects/bdr:%s/datastreams/lowres/content" % (
            BDR_SERVER, page_pid)
    context['det_img_view_src'] = "https://%s/viewers/image/zoom/bdr:%s" % (
        BDR_SERVER, page_pid)
    # annotations/metadata
    page_json_uri = u'https://%s/api/pub/items/bdr:%s/' % (BDR_SERVER, page_pid)
    r = requests.get(page_json_uri, timeout=60)
    if not r.ok:
        logger.error(u'TTWR - error retrieving url %s' % page_json_uri)
        logger.error(u'TTWR - response: %s - %s' % (r.status_code, r.text))
        return HttpResponseServerError('Error retrieving content.')
    page_json = json.loads(r.text)
    annotations = page_json['relations']['hasAnnotation']
    context['has_annotations'] = len(annotations)
    context['annotation_uris'] = []
    context['annotations'] = []
    # Fetch and parse the MODS XML for each annotation on this page.
    for i in range(len(annotations)):
        annot_pid = annotations[i]['pid']
        annot_studio_uri = annotations[i]['uri']
        annot_xml_uri = 'https://%s/services/getMods/%s/' % (BDR_SERVER, annot_pid)
        context['annotation_uris'].append(annot_xml_uri)
        curr_annot = {}
        curr_annot['xml_uri'] = annot_xml_uri
        # Flags recording which MODS elements were found for this annotation.
        curr_annot['has_elements'] = {
            'inscriptions': 0,
            'annotations': 0,
            'annotator': 0,
            'origin': 0,
            'title': 0,
            'abstract': 0
        }
        root = ET.fromstring(requests.get(annot_xml_uri).content)
        # English title vs. original-language title.
        for title in root.getiterator('{http://www.loc.gov/mods/v3}titleInfo'):
            if title.attrib['lang'] == 'en':
                curr_annot['title'] = title[0].text
                curr_annot['has_elements']['title'] += 1
            else:
                curr_annot['orig_title'] = title[0].text
                curr_annot['has_elements']['title'] += 1
        curr_annot['names'] = []
        for name in root.getiterator('{http://www.loc.gov/mods/v3}name'):
            curr_annot['names'].append({
                'name': name[0].text,
                'role': name[1][0].text.capitalize() if (name[1][0].text) else "Contributor",
                'trp_id': "%04d" % int(name.attrib['{http://www.w3.org/1999/xlink}href']),
            })
        for abstract in root.getiterator(
                '{http://www.loc.gov/mods/v3}abstract'):
            curr_annot['abstract'] = abstract.text
            curr_annot['has_elements']['abstract'] = 1
        for origin in root.getiterator(
                '{http://www.loc.gov/mods/v3}originInfo'):
            curr_annot['origin'] = origin[0].text
            curr_annot['has_elements']['origin'] = 1
        curr_annot['notes'] = []
        curr_annot['inscriptions'] = []
        curr_annot['annotations'] = []
        curr_annot['annotator'] = ""
        # MODS <note> elements carry inscriptions, annotations and the
        # annotator name, distinguished by their 'type' attribute.
        for note in root.getiterator('{http://www.loc.gov/mods/v3}note'):
            curr_note = {}
            for att in note.attrib:
                curr_note[att] = note.attrib[att]
            if note.text:
                curr_note['text'] = note.text
            if curr_note['type'].lower() == 'inscription' and note.text:
                curr_annot['inscriptions'].append(curr_note['displayLabel'] +
                                                  ": " + curr_note['text'])
                curr_annot['has_elements']['inscriptions'] = 1
            elif curr_note['type'].lower() == 'annotation' and note.text:
                curr_annot['annotations'].append(curr_note['displayLabel'] +
                                                 ": " + curr_note['text'])
                curr_annot['has_elements']['annotations'] = 1
            elif curr_note['type'].lower() == 'resp' and note.text:
                curr_annot['annotator'] = note.text
                curr_annot['has_elements']['annotator'] = 1
            #curr_annot['notes'].append(curr_note)
        context['annotations'].append(curr_annot)
    c = RequestContext(request, context)
    #raise 404 if a certain book does not exist
    return HttpResponse(template.render(c))
def post(self, request, *args, **kwargs): ### get the client IP from the request ip = get_ip(request) informinterval = acs_settings.INFORM_INTERVAL ### check if we have an acs session id in a cookie if 'acs_session_id' in request.COOKIES: hexid = request.COOKIES['acs_session_id'] try: acs_session = AcsSession.objects.get(acs_session_id=hexid) acs_session.acs_log( "got acs_session_id from acs_session_id cookie") except AcsSession.DoesNotExist: ### create a new AcsSession? only if we haven't already got enough sessions from this client ip sessions_since_informinterval = AcsSession.objects.filter( client_ip=ip, created_date__gt=timezone.now() - timedelta(seconds=informinterval), ).count() if sessions_since_informinterval > acs_settings.INFORM_LIMIT_PER_INTERVAL: message = "acs session DENIED: the IP %s already has %s sessions the last %s seconds, no thanks (limit is %s)" % ( ip, sessions_since_informinterval, informinterval, acs_settings.INFORM_LIMIT_PER_INTERVAL) print(message) return HttpResponse(status=420) acs_session = AcsSession.objects.create(client_ip=ip, ) hexid = acs_session.hexid acs_session.acs_log( "got invalid acs_session_id %s from acs_session_id cookie, new acs session created" % request.COOKIES['acs_session_id']) else: ### no acs_session_id cookie seen, create a new AcsSession? 
only if we haven't already got enough sessions from this client ip sessions_since_informinterval = AcsSession.objects.filter( client_ip=ip, created_date__gt=timezone.now() - timedelta(seconds=informinterval), ).count() if sessions_since_informinterval > acs_settings.INFORM_LIMIT_PER_INTERVAL: message = "acs session DENIED: the IP %s already has %s sessions the last %s seconds, no thanks (limit is %s)" % ( ip, sessions_since_informinterval, informinterval, acs_settings.INFORM_LIMIT_PER_INTERVAL) print(message) return HttpResponse(status=420) acs_session = AcsSession.objects.create(client_ip=ip, ) ### and save the acs session ID (uuid.hex()) in the django session for later use hexid = acs_session.acs_session_id.hex acs_session.acs_log( "created new acs session (had %s sessions in the latest informinterval)" % sessions_since_informinterval) ### do we have a body in this http request? attempt parsing it as XML if so validxml = False if request.body: try: xmlroot = fromstring(request.body) validxml = True except Exception as E: acs_session.acs_log('got exception parsing ACS XML: %s' % E) ### get all HTTP headers for this request headerdict = {} for key, value in request.META.items(): ### in django all HTTP headers are prefixed with HTTP_ in request.META if key[:5] == 'HTTP_': headerdict[key] = value ### save this HTTP request to DB acs_http_request = AcsHttpRequest.objects.create( acs_session=acs_session, request_headers=json.dumps(headerdict), request_xml_valid=validxml, fk_body=create_xml_document(xml=request.body), ) acs_session.acs_log("saved acs http request %s to db" % acs_http_request) if request.body: ### bail out if we have a bad xml body if not validxml: message = 'Invalid XML body posted by client %s' % ip acs_session.acs_log(message) return HttpResponseBadRequest(message) ### figure out which cwmp version we are speaking (if any) if not 'cwmp' in xmlroot.nsmap: message = 'No cwmp namespace found in soap envelope, this is not a valid CWMP request posted by 
client %s' % ip acs_session.acs_log(message) return HttpResponseBadRequest(message) else: acs_session.cwmp_namespace = xmlroot.nsmap['cwmp'] acs_session.save() ### parse soap header and body soap_header = xmlroot.find('soap-env:Header', acs_session.soap_namespaces) soap_body = xmlroot.find('soap-env:Body', acs_session.soap_namespaces) if soap_body is None: # a soap body is required.. message = 'Unable to find SOAP body in xml posted by client %s' % ip acs_session.acs_log(message) return HttpResponseBadRequest(message) if soap_header is not None: ### parse the cwmp id from the soap header acs_http_request.cwmp_id = soap_header.find( 'cwmp:ID', acs_session.soap_namespaces).text ### do we have exactly one soap object in this soap body? if len(list(soap_body)) != 1: acs_http_request.save() message = 'Only one cwmp object per soap envelope please (client: %s)' % ip acs_session.acs_log(message) return HttpResponseBadRequest(message) else: ### this appears (for now) to be a valid soap envelope acs_http_request.request_soap_valid = True ### get the soap element in the format {namespace}Method acs_http_request.soap_element = list(soap_body)[0].tag else: ### empty request body, this means that the CPE is done for now acs_http_request.cwmp_id = '' acs_http_request.soap_element = '{%s}(empty request body)' % acs_http_request.acs_session.soap_namespaces[ 'cwmp'] ### save the http request acs_http_request.save() ################# http request saved to acs session, now we have to put a response together ################## ################# at this point we still have not associated the acs session with an acs device, ############# ################# and we can only do so if we have a valid inform with vendor, serial etc. 
for the device #### if not acs_session.acs_device: # we only permit Inform requests when we have no device if acs_http_request.cwmp_rpc_method != "Inform": message = 'An ACS session must begin with an Inform, not %s' % acs_http_request.cwmp_rpc_method acs_session.acs_log(message) return HttpResponseBadRequest(message) ### initialize a variable empty_response = False ### first things first, do we have a body in the http request? if request.body: if acs_http_request.cwmp_rpc_method in settings.CWMP_ACS_VALID_RPC_METHODS: #################################################################################################### acs_session.acs_log( 'the ACS client %s is calling a valid RPC method on the ACS server: %s' % (ip, acs_http_request.cwmp_rpc_method)) ### get SOAP response envelope root, body = get_soap_envelope(acs_http_request.cwmp_id, acs_session) ### set a few variables used when saving the HTTP response to db response_cwmp_rpc_method = '%sResponse' % acs_http_request.cwmp_rpc_method response_cwmp_id = acs_http_request.cwmp_id ### parse the soap request (which ACS RPC method is the CPE calling?) 
if acs_http_request.cwmp_rpc_method == 'Inform': ### get Inform xml element inform = soap_body.find('cwmp:Inform', acs_session.soap_namespaces) ### determine which data model version this device is using datamodel, created = CwmpDataModel.objects.get_or_create( name=acs_session.determine_data_model(inform)) acs_session.acs_log("ACS client is using data model %s" % datamodel) acs_session.root_data_model = datamodel ######################################################################### ### get deviceid element from Inform request deviceid = inform.find('DeviceId') if deviceid is None: message = 'Invalid Inform, DeviceID missing from request %s' % request acs_session.acs_log(message) return HttpResponseBadRequest(message) serial = deviceid.find('SerialNumber').text if not serial: message = 'Invalid Inform, SerialNumber missing from request %s' % request acs_session.acs_log(message) return HttpResponseBadRequest(message) vendor = deviceid.find('Manufacturer').text if not vendor: message = 'Invalid Inform, Manufacturer missing from request %s' % request acs_session.acs_log(message) return HttpResponseBadRequest(message) model = deviceid.find('ProductClass').text if not model: message = 'Invalid Inform, ProductClass missing from request %s' % request acs_session.acs_log(message) return HttpResponseBadRequest(message) oui = deviceid.find('OUI').text if not oui: message = 'Invalid Inform, OUI missing from request %s' % request acs_session.acs_log(message) return HttpResponseBadRequest(message) ### find or create acs devicevendor (using Manufacturer and OUI) acs_devicevendor, created = AcsDeviceVendor.objects.get_or_create( name=vendor, oui=oui, ) ### find or create acs devicetype (using ProductClass) acs_devicemodel, created = AcsDeviceModel.objects.get_or_create( vendor=acs_devicevendor, name=model, ) ### find or create acs device (using serial number and acs devicetype) acs_device, created = AcsDevice.objects.get_or_create( model=acs_devicemodel, serial=serial) ### 
set latest session result to False and increase inform count acs_device.acs_latest_session_result = False acs_device.acs_inform_count = F('acs_inform_count') + 1 acs_device.save() # save acs_device to acs_session acs_session.acs_device = acs_device acs_session.save() # attempt acs device association if not acs_device.get_related_device(): acs_device.associate_with_related_device() if not acs_device.acs_xmpp_password: acs_device.create_xmpp_user() if not acs_device.acs_connectionrequest_password: acs_device.create_connreq_password() if not acs_session.get_inform_eventcodes( inform, acs_http_request): # the event section is missing from this Inform return HttpResponseBadRequest() ######################################################### # refresh from db to make any changes above visible acs_device.refresh_from_db() # if this acs device is associated with a real device we can call that devices verify_acs_client_ip() method # and possibly mark this acs session as client_ip_verified=True (which is required before we give out any secrets like ssid in the session) if acs_device.get_related_device(): ### run acs pre-ip-verified session hook acs_device.get_related_device( ).acs_session_pre_verify_hook() # set acs_session.client_ip_verified based on the outcome of verify_acs_client_ip(acs_session.client_ip) acs_session.client_ip_verified = acs_device.get_related_device( ).verify_acs_client_ip(acs_session.client_ip) message = "client_ip_verified set to %s after running acs_device.get_related_device().verify_acs_client_ip(%s)" % ( acs_session.client_ip_verified, acs_session.client_ip) acs_session.acs_log(message) acs_session.save() ### run acs post-ip-verified session hook acs_device.get_related_device( ).acs_session_post_verify_hook() # refresh from db to make any changes above visible acs_device.refresh_from_db() ########################################################## ### this is a good place to check for different Inform EventCodes or use ### other data from the Inform 
# first we clean up any old unprocessed automatic jobs. # these might be lingering from earlier sessions that may have failed (for any number of reasons) deleted, info = acs_session.acs_device.acs_queue_jobs.filter( automatic=True, processed=False).delete() if deleted: acs_session.acs_log( "Cleanup: Deleted %s old unprocessed automatic AcsQueueJobs for this device" % deleted) ### get parameterlist from the Inform payload parameterlist = inform.find('ParameterList') ### update current_config_level from Device.ManagementServer.ParameterKey parameterkey = get_value_from_parameterlist( parameterlist, acs_session.get_acs_parameter_name( 'django_acs.acs.parameterkey')) if not parameterkey: acs_device.current_config_level = None else: acs_device.current_config_level = parse_datetime( parameterkey) ### update latest_inform time acs_device.acs_latest_inform = timezone.now() ### update current_software_version acs_device.current_software_version = get_value_from_parameterlist( parameterlist, acs_session.get_acs_parameter_name( 'django_acs.deviceinfo.softwareversion')) ### save acs device acs_device.save() ############################################### ### This is where we do things we want do _after_ an Inform session. ### Queue jobs here before sending InformResponse and they will be run in the same session. # queue GetParameterNames, GetParameterValues, GetParameterAttributes if not acs_session.collect_device_info( "Collecting information triggered by Inform"): # unable to queue neccesary job return HttpResponseServerError() ## Queue a firmware upgrade job? 
if not acs_session.device_firmware_upgrade(): # we wanted to queue a firmware upgrade job, but failed return HttpResponseServerError() ############################################### ### we are done processing the Inform RPC request, and ready to return the InformResponse, ### so add the outer response element cwmp = etree.SubElement(body, nse('cwmp', 'InformResponse')) ### add the inner response elements, without namespace (according to cwmp spec!) maxenv = etree.SubElement(cwmp, 'MaxEnvelopes') maxenv.text = '1' elif acs_http_request.cwmp_rpc_method == 'TransferComplete': ### handle TransferComplete RPC call cwmp = etree.SubElement( body, nse('cwmp', 'TransferCompleteResponse')) else: message = 'Unimplemented cwmp method %s called by the client %s' % ( acs_http_request.cwmp_rpc_method, acs_device) acs_session.acs_log(message) return HttpResponseBadRequest(message) ##################################################################################################### ### we are done processing the http request, put HTTP response together output = etree.tostring(root, encoding='utf-8', xml_declaration=True) response = HttpResponse(output, content_type='text/xml; charset=utf-8') ### save the HTTP response acs_http_response = AcsHttpResponse.objects.create( http_request=acs_http_request, fk_body=create_xml_document(xml=response.content), cwmp_id=response_cwmp_id, soap_element="{%s}%s" % (acs_session.soap_namespaces['cwmp'], response_cwmp_rpc_method), rpc_response_to=acs_http_request, ) acs_session.acs_log( "responding to CPE %s with %s" % (acs_session.acs_device, response_cwmp_rpc_method)) elif acs_http_request.cwmp_rpc_method and acs_http_request.cwmp_rpc_method[: -8] in settings.CWMP_CPE_VALID_RPC_METHODS: ##################################################################################################### acs_session.acs_log( 'the CPE %s is responding to an RPC call from the ACS: %s' % (acs_session.acs_device, acs_http_request.cwmp_rpc_method)) ### first link this 
http request to the related rpc request (which is in a http response), ### find it by looking for the same rpc method and cwmp id in http responses in this acs session match = False for httpresponse in acs_session.acs_http_responses: if httpresponse.cwmp_rpc_method == acs_http_request.cwmp_rpc_method[: -8] and httpresponse.cwmp_id == acs_http_request.cwmp_id: acs_http_request.rpc_response_to = httpresponse acs_http_request.save() match = True if not match: message = 'Unable to find the HTTP response containing the RPC request being responded to :(' acs_session.acs_log(message) return HttpResponseServerError(message) ### parse the cwmp object from the soap body rpcresponsexml = soap_body.find( 'cwmp:%s' % acs_http_request.cwmp_rpc_method, acs_session.soap_namespaces) if acs_http_request.cwmp_rpc_method == 'GetParameterNamesResponse': ### do nothing for now, the response will be used when the GetParameterValuesResponse comes in later pass elif acs_http_request.cwmp_rpc_method == 'GetParameterValuesResponse': # nothing here for now pass elif acs_http_request.cwmp_rpc_method == 'GetParameterAttributesResponse': # this is a GetParameterAttributesResponse, attempt to update the device acs parameters if acs_session.acs_device.update_acs_parameters( acs_http_request): ################################################################################################# ### this is where we do things to and with the recently fetched acs parameters from the device, ### like configuring the device or handling user config changes ### Queue jobs here before sending GetParameterAttributesResponse and they will be run in the same session. 
# extract device uptime from acs_device.acs_parameters and save it to acs_session.device_uptime acs_session.update_device_uptime() # check if we need to call the handle_user_config_changes() method on the acs_device, # we only check for user changes if a device has been configured by us already, and doesn't need any more config at the moment if acs_session.acs_device.current_config_level and acs_session.acs_device.current_config_level > acs_session.acs_device.get_desired_config_level( ): # device is already configured, and doesn't need additional config from us right now, so check if the user changed anything on the device, and act accordingly acs_session.acs_device.handle_user_config_changes() # refresh to get any changes from above acs_session.refresh_from_db() # if this device has been reconfigured in this session we collect data again, # if not, we reconfigure it if needed if acs_session.configuration_done: # device has been configured, so collect data again so we have the latest (unless we have already done so) if not acs_session.post_configuration_collection_done: if not acs_session.collect_device_info( reason="Device has been reconfigured"): acs_session.acs_log( "Unable to queue one or more jobs to collect info after configuration" ) return HttpResponseServerError() else: # this device has not been configured in this ACS session. This is where we check if we need to configure it now. # acs_session.configure_device returns False if there was a problem configuring the device, and true if # the device was configured, or did not need to be configured if not acs_session.configure_device(): # there was a problem creating configure jobs for the device return HttpResponseServerError() elif acs_http_request.cwmp_rpc_method == 'GetRPCMethodsResponse': pass elif acs_http_request.cwmp_rpc_method == 'SetParameterValuesResponse': ### find status status = rpcresponsexml.find('Status').text if status != '0': ### ACS client failed to apply all our settings, fuckery is afoot! 
message = 'The ACS device %s failed to apply our SetParameterValues settings, something is wrong!' % acs_device acs_session.acs_log(message) return HttpResponseBadRequest(message) ### find the parameterkey and update the acs_device so we know its current_config_level ### since this is a SetParameterValuesResponse we will probably get settings.CWMP_CONFIG_INCOMPLETE_PARAMETERKEY_DATE here, ### which is fine(tm) parameterkey = acs_http_request.rpc_response_to.soap_body.find( 'cwmp:SetParameterValues', acs_session.soap_namespaces).find('ParameterKey').text acs_session.acs_device.current_config_level = parse_datetime( parameterkey) elif acs_http_request.cwmp_rpc_method == 'SetParameterAttributesResponse': ### find the parameterkey and update the acs_device so we know its current_config_level parameterkey = acs_http_request.rpc_response_to.soap_body.find( 'cwmp:SetParameterAttributes', acs_session.soap_namespaces).find('ParameterKey').text acs_session.acs_device.current_config_level = parse_datetime( parameterkey) # in case we have a local desired_config_level on the acs device, unset it now as the configuration has been done if acs_session.acs_device.desired_config_level: acs_session.acs_device.desired_config_level = None acs_session.acs_device.save() elif acs_http_request.cwmp_rpc_method == 'FactoryResetResponse': empty_response = True ### we are done processing the clients response, do we have anything else? 
response = acs_http_request.get_response( empty_response=empty_response) else: ##################################################################################################### ### TODO: insert some code to handle soapfault here so we dont hit the "Unknown cwmp object/method" bit below when a soapfault happens ##################################################################################################### acs_session.acs_log( 'unknown cwmp object/method received from %s: %s' % (acs_session.acs_device, acs_http_request.cwmp_rpc_method)) return HttpResponseBadRequest( 'unknown cwmp object/method received') else: # this http request has an empty body acs_session.acs_log( 'the CPE %s is done and posted an empty body to the ACS' % acs_session.acs_device) ### get a response for the client - if we have nothing queued it will be an empty response response = acs_http_request.get_response() ### all done, update the acs session with result before returning response acs_session.update_session_result() ### set the acs session cookie # we have to set this cookie manually because some stupid ACS client cannot parse expires in a http cookie # and Django always sets exipires in cookies, no even it the expires argument is set to None, # to be compatible with old IE clients yay #response.set_cookie(key='acs_session_id', value=max_age=60, expires=None, path='/') response[ 'Set-Cookie'] = "acs_session_id=%s; Max-Age=60; Path=/" % hexid return response
Parameters:: - `PISTON_EMAIL_ERRORS`: Will send a Django formatted error email to people in `settings.ADMINS`. - `PISTON_DISPLAY_ERRORS`: Will return a simple traceback to the caller, so he can tell you what error they got. If `PISTON_DISPLAY_ERRORS` is not enabled, the caller will receive a basic "500 Internal Server Error" message. """ exc_type, exc_value, tb = sys.exc_info() rep = ExceptionReporter(request, exc_type, exc_value, tb.tb_next) if self.email_errors: self.email_exception(rep) if self.display_errors: return HttpResponseServerError( format_error('\n'.join(rep.format_exception()))) else: raise content_range = None if isinstance(result, QuerySet): """ Limit results based on requested items. This is a based on HTTP 1.1 Partial GET, RFC 2616 sec 14.35, but is intended to operate on the record level rather than the byte level. We will still respond with code 206 and a range header. """ request_range = None if 'HTTP_RANGE' in request.META:
def instructor_dashboard_2(request, course_id):
    """
    Display the instructor dashboard for a course.

    Builds the list of dashboard ``sections`` incrementally, gating each
    section on the caller's per-course roles/permissions, then renders the
    instructor dashboard template with that list.

    Raises Http404 when the user lacks the VIEW_DASHBOARD permission;
    returns a 500 when the course id cannot be parsed into a CourseKey.
    """
    try:
        course_key = CourseKey.from_string(course_id)
    except InvalidKeyError:
        # Malformed course id in the URL: log and answer 500 rather than 404,
        # since the route matched but the key is unusable.
        log.error(
            u"Unable to find course with course key %s while loading the Instructor Dashboard.",
            course_id)
        return HttpResponseServerError()

    course = get_course_by_id(course_key, depth=0)

    # Role/permission flags consulted below to decide which sections to show.
    access = {
        'admin': request.user.is_staff,
        'instructor': bool(has_access(request.user, 'instructor', course)),
        'finance_admin': CourseFinanceAdminRole(course_key).has_user(request.user),
        'sales_admin': CourseSalesAdminRole(course_key).has_user(request.user),
        'staff': bool(has_access(request.user, 'staff', course)),
        'forum_admin': has_forum_access(request.user, course_key, FORUM_ROLE_ADMINISTRATOR),
        'data_researcher': request.user.has_perm(permissions.CAN_RESEARCH, course_key),
    }

    if not request.user.has_perm(permissions.VIEW_DASHBOARD, course_key):
        raise Http404()

    is_white_label = CourseMode.is_white_label(course_key)

    reports_enabled = configuration_helpers.get_value('SHOW_ECOMMERCE_REPORTS', False)

    sections = []
    if access['staff']:
        sections.extend([
            _section_course_info(course, access),
            _section_membership(course, access),
            _section_cohort_management(course, access),
            _section_discussions_management(course, access),
            _section_student_admin(course, access),
        ])
    if access['data_researcher']:
        sections.append(_section_data_download(course, access))

    analytics_dashboard_message = None
    if show_analytics_dashboard_message(course_key) and (access['staff'] or access['instructor']):
        # Construct a URL to the external analytics dashboard
        analytics_dashboard_url = '{0}/courses/{1}'.format(
            settings.ANALYTICS_DASHBOARD_URL, six.text_type(course_key))
        link_start = HTML(u"<a href=\"{}\" rel=\"noopener\" target=\"_blank\">"
                          ).format(analytics_dashboard_url)
        analytics_dashboard_message = _(
            u"To gain insights into student enrollment and participation {link_start}"
            u"visit {analytics_dashboard_name}, our new course analytics product{link_end}."
        )
        # HTML/Text wrappers keep the translated message safely escaped while
        # allowing the raw anchor markup through.
        analytics_dashboard_message = Text(analytics_dashboard_message).format(
            link_start=link_start,
            link_end=HTML("</a>"),
            analytics_dashboard_name=settings.ANALYTICS_DASHBOARD_NAME)

        # Temporarily show the "Analytics" section until we have a better way of linking to Insights
        sections.append(_section_analytics(course, access))

    # Check if there is corresponding entry in the CourseMode Table related to the Instructor Dashboard course
    course_mode_has_price = False
    paid_modes = CourseMode.paid_modes_for_course(course_key)
    if len(paid_modes) == 1:
        course_mode_has_price = True
    elif len(paid_modes) > 1:
        # More than one paid mode is a misconfiguration for eCommerce; log it
        # and leave course_mode_has_price False so the tab stays hidden.
        log.error(
            u"Course %s has %s course modes with payment options. Course must only have "
            u"one paid course mode to enable eCommerce options.",
            six.text_type(course_key), len(paid_modes))

    if access['instructor'] and is_enabled_for_course(course_key):
        # Insert the extensions section at a fixed position among the staff sections.
        sections.insert(3, _section_extensions(course))

    # Gate access to course email by feature flag & by course-specific authorization
    if is_bulk_email_feature_enabled(course_key) and (access['staff'] or access['instructor']):
        sections.append(_section_send_email(course, access))

    # Gate access to Ecommerce tab
    if course_mode_has_price and (access['finance_admin'] or access['sales_admin']):
        sections.append(
            _section_e_commerce(course, access, paid_modes[0], is_white_label, reports_enabled))

    # Gate access to Special Exam tab depending if either timed exams or proctored exams
    # are enabled in the course
    user_has_access = any([
        request.user.is_staff,
        CourseStaffRole(course_key).has_user(request.user),
        CourseInstructorRole(course_key).has_user(request.user)
    ])
    course_has_special_exams = course.enable_proctored_exams or course.enable_timed_exams
    can_see_special_exams = course_has_special_exams and user_has_access and settings.FEATURES.get(
        'ENABLE_SPECIAL_EXAMS', False)
    if can_see_special_exams:
        sections.append(_section_special_exams(course, access))

    # Certificates panel
    # This is used to generate example certificates
    # and enable self-generated certificates for a course.
    # Note: This is hidden for all CCXs
    certs_enabled = CertificateGenerationConfiguration.current(
    ).enabled and not hasattr(course_key, 'ccx')
    if certs_enabled and access['admin']:
        sections.append(_section_certificates(course))

    openassessment_blocks = modulestore().get_items(
        course_key, qualifiers={'category': 'openassessment'})
    # filter out orphaned openassessment blocks
    openassessment_blocks = [
        block for block in openassessment_blocks if block.parent is not None
    ]
    if len(openassessment_blocks) > 0 and access['staff']:
        sections.append(
            _section_open_response_assessment(request, course,
                                              openassessment_blocks, access))

    disable_buttons = not _is_small_course(course_key)

    # Certificate exception/invalidation management data and URLs for the template.
    certificate_white_list = CertificateWhitelist.get_certificate_white_list(
        course_key)
    generate_certificate_exceptions_url = reverse(
        'generate_certificate_exceptions',
        kwargs={
            'course_id': six.text_type(course_key),
            'generate_for': ''
        })
    generate_bulk_certificate_exceptions_url = reverse(
        'generate_bulk_certificate_exceptions',
        kwargs={'course_id': six.text_type(course_key)})
    certificate_exception_view_url = reverse(
        'certificate_exception_view',
        kwargs={'course_id': six.text_type(course_key)})
    certificate_invalidation_view_url = reverse(
        'certificate_invalidation_view',
        kwargs={'course_id': six.text_type(course_key)})
    certificate_invalidations = CertificateInvalidation.get_certificate_invalidations(
        course_key)

    context = {
        'course': course,
        'studio_url': get_studio_url(course, 'course'),
        'sections': sections,
        'disable_buttons': disable_buttons,
        'analytics_dashboard_message': analytics_dashboard_message,
        'certificate_white_list': certificate_white_list,
        'certificate_invalidations': certificate_invalidations,
        'generate_certificate_exceptions_url': generate_certificate_exceptions_url,
        'generate_bulk_certificate_exceptions_url': generate_bulk_certificate_exceptions_url,
        'certificate_exception_view_url': certificate_exception_view_url,
        'certificate_invalidation_view_url': certificate_invalidation_view_url,
        'xqa_server': settings.FEATURES.get('XQA_SERVER', "http://your_xqa_server.com"),
    }

    return render_to_response(
        'instructor/instructor_dashboard_2/instructor_dashboard_2.html', context)
def server_error(request, template_name='500.html'):
    """Render the 500 page with MEDIA_URL exposed in the template context."""
    context = Context({'MEDIA_URL': settings.MEDIA_URL})
    template = loader.get_template(template_name)
    return HttpResponseServerError(template.render(context))
def Upload(request):
    """
    Export selected proposed operation plans to Odoo.

    Reads a JSON list of records from the request body (each with a "type"
    of "PO", "DO" or anything else for manufacturing orders, plus a
    "reference"), validates them against the database, builds a multipart
    form-data payload containing an XML plan, and POSTs it to the Odoo
    server configured via the "odoo.*" parameters. Records successfully
    exported are marked status="approved" / source="odoo_1".

    Returns HttpResponse("OK") on success, or an HttpResponseServerError
    describing the failure.
    """
    try:
        # Prepare a message for odoo
        boundary = email.generator._make_boundary()
        odoo_db = Parameter.getValue("odoo.db", request.database)
        odoo_company = Parameter.getValue("odoo.company", request.database)
        odoo_user = Parameter.getValue("odoo.user", request.database)
        # Password can come from settings (preferred) or the parameter table.
        odoo_password = settings.ODOO_PASSWORDS.get(request.database, None)
        if not odoo_password:
            odoo_password = Parameter.getValue("odoo.password", request.database)
        if not odoo_db or not odoo_company or not odoo_user or not odoo_password:
            return HttpResponseServerError(_("Invalid configuration parameters"))
        data_odoo = [
            "--%s" % boundary,
            'Content-Disposition: form-data; name="webtoken"\r',
            "\r",
            # Short-lived (10 minute) JWT authenticating this upload.
            "%s\r" % jwt.encode(
                {
                    "exp": round(time.time()) + 600,
                    "user": odoo_user
                },
                settings.DATABASES[request.database].get(
                    "SECRET_WEBTOKEN_KEY", settings.SECRET_KEY),
                algorithm="HS256",
            ).decode("ascii"),
            "--%s\r" % boundary,
            'Content-Disposition: form-data; name="database"',
            "",
            odoo_db,
            "--%s" % boundary,
            'Content-Disposition: form-data; name="language"',
            "",
            Parameter.getValue("odoo.language", request.database, "en_US"),
            "--%s" % boundary,
            'Content-Disposition: form-data; name="company"',
            "",
            odoo_company,
            "--%s" % boundary,
            'Content-Disposition: form-data; name="mode"',
            "",
            "2",  # Marks incremental export
            "--%s" % boundary,
            'Content-Disposition: file; name="frePPLe plan"; filename="frepple_plan.xml"',
            "Content-Type: application/xml",
            "",
            '<?xml version="1.0" encoding="UTF-8" ?>',
            '<plan xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><operationplans>',
        ]

        # Validate records which exist in the database
        data = json.loads(request.body.decode("utf-8"))
        data_ok = False
        obj = []
        for rec in data:
            try:
                if rec["type"] == "PO":
                    # Purchase order: only proposed POs whose supplier and
                    # item originate from odoo are exported.
                    po = PurchaseOrder.objects.using(
                        request.database).get(reference=rec["reference"])
                    if (not po.supplier.source or po.status != "proposed"
                            or not po.item.source):
                        continue
                    data_ok = True
                    obj.append(po)
                    data_odoo.append(
                        '<operationplan ordertype="PO" id="%s" item=%s location=%s supplier=%s start="%s" end="%s" quantity="%s" location_id=%s item_id=%s criticality="%d"/>'
                        % (
                            po.reference,
                            quoteattr(po.item.name),
                            quoteattr(po.location.name),
                            quoteattr(po.supplier.name),
                            po.startdate,
                            po.enddate,
                            po.quantity,
                            quoteattr(po.location.subcategory or ""),
                            quoteattr(po.item.subcategory or ""),
                            int(po.criticality),
                        ))
                elif rec["type"] == "DO":
                    # Distribution order: both endpoints and the item must
                    # have an odoo source, and the order must be proposed.
                    do = DistributionOrder.objects.using(
                        request.database).get(reference=rec["reference"])
                    if (not do.origin.source or not do.location.source
                            or do.status != "proposed" or not do.item.source):
                        continue
                    data_ok = True
                    obj.append(do)
                    data_odoo.append(
                        '<operationplan status="%s" reference="%s" ordertype="DO" item=%s origin=%s destination=%s start="%s" end="%s" quantity="%s" origin_id=%s destination_id=%s item_id=%s criticality="%d"/>'
                        % (
                            do.status,
                            do.reference,
                            quoteattr(do.item.name),
                            quoteattr(do.origin.name),
                            quoteattr(do.location.name),
                            do.startdate,
                            do.enddate,
                            do.quantity,
                            quoteattr(do.origin.subcategory or ""),
                            quoteattr(do.location.subcategory or ""),
                            quoteattr(do.item.subcategory or ""),
                            int(do.criticality),
                        ))
                else:
                    # Anything else is treated as a manufacturing order.
                    op = OperationPlan.objects.using(
                        request.database).get(reference=rec["reference"])
                    if (not op.operation.source or op.status != "proposed"
                            or not op.operation.item):
                        continue
                    data_ok = True
                    obj.append(op)
                    data_odoo.append(
                        '<operationplan ordertype="MO" id="%s" item=%s location=%s operation=%s start="%s" end="%s" quantity="%s" location_id=%s item_id=%s criticality="%d"/>'
                        % (
                            op.reference,
                            quoteattr(op.operation.item.name),
                            quoteattr(op.operation.location.name),
                            quoteattr(op.operation.name),
                            op.startdate,
                            op.enddate,
                            op.quantity,
                            quoteattr(op.operation.location.subcategory or ""),
                            quoteattr(op.operation.item.subcategory or ""),
                            int(op.criticality),
                        ))
            except Exception:
                # Best-effort: silently skip records that no longer exist or
                # have broken relations; only completely empty selections fail.
                pass
        if not data_ok:
            return HttpResponseServerError(_("No proposed data records selected"))

        # Send the data to Odoo
        data_odoo.append("</operationplans></plan>")
        data_odoo.append("--%s--" % boundary)
        data_odoo.append("")
        body = "\n".join(data_odoo).encode("utf-8")
        size = len(body)
        # base64.encodestring was removed in Python 3.9; b64encode produces
        # the same digits without the trailing newline, so the old [:-1]
        # strip is no longer needed.
        encoded = base64.b64encode(
            ("%s:%s" % (odoo_user, odoo_password)).encode("utf-8"))
        logger.debug("Uploading %d bytes of planning results to Odoo" % size)
        req = Request(
            "%sfrepple/xml/" % Parameter.getValue("odoo.url", request.database),
            data=body,
            headers={
                "Authorization": "Basic %s" % encoded.decode("ascii"),
                # Header values should be strings, not ints.
                "Content-length": str(size),
                "Content-Type": "multipart/form-data; boundary=%s" % boundary,
            },
        )

        # Read the response
        with urlopen(req) as f:
            msg = f.read()
        logger.debug("Odoo response: %s" % msg.decode("utf-8"))

        # Mark the exported records as approved and owned by odoo.
        for i in obj:
            i.status = "approved"
            i.source = "odoo_1"
            i.save(using=request.database)
        return HttpResponse("OK")
    except HTTPError:
        logger.error("Can't connect to the Odoo server")
        return HttpResponseServerError("Can't connect to the odoo server")
    except Exception as e:
        logger.error(e)
        return HttpResponseServerError("internal server error")
def download_subtitles(request, format):
    """
    Serve a video's subtitles as a downloadable file in the requested format.

    Query parameters: video_id (required), lang_pk (optional, defaults to the
    video's original language), revision (optional version number).
    Raises Http404 for missing video/language/version; returns a 500 response
    when the requested format is not supported by babelsubs.
    """
    video_id = request.GET.get('video_id')
    lang_id = request.GET.get('lang_pk')
    revision = request.GET.get('revision', None)

    if not video_id:
        #if video_id == None, Video.objects.get raise exception. Better show 404
        #because video_id is required
        raise Http404
    video = get_object_or_404(models.Video, video_id=video_id)

    if not lang_id:
        # if no language is passed, assume it's the original one
        language = video.subtitle_language()
        if language is None:
            raise Http404
    else:
        try:
            language = video.newsubtitlelanguage_set.get(pk=lang_id)
        except ObjectDoesNotExist:
            raise Http404

    team_video = video.get_team_video()
    if not team_video:
        # Non-team videos don't require moderation
        version = language and language.version(public_only=False,
                                                version_number=revision)
    else:
        # Members can see all versions
        member = get_member(request.user, team_video.team)
        if member:
            version = language and language.version(public_only=False,
                                                    version_number=revision)
        else:
            version = language and language.version(version_number=revision)

    if not version:
        raise Http404
    if format not in babelsubs.get_available_formats():
        # BUGFIX: this used to `raise` the response object, which is not an
        # exception and would crash with a TypeError instead of answering 500.
        return HttpResponseServerError("Format not found")

    subs_text = babelsubs.to(version.get_subtitles(), format,
                             language=version.language_code)
    # since this is a downlaod, we can afford not to escape tags, specially true
    # since speaker change is denoted by '>>' and that would get entirely stripped out
    response = HttpResponse(subs_text, mimetype="text/plain")
    original_filename = '%s.%s' % (video.lang_filename(
        language.language_code), format)

    if 'HTTP_USER_AGENT' not in request.META or u'WebKit' in request.META[
            'HTTP_USER_AGENT']:
        # Safari 3.0 and Chrome 2.0 accepts UTF-8 encoded string directly.
        filename_header = 'filename=%s' % original_filename.encode('utf-8')
    elif u'MSIE' in request.META['HTTP_USER_AGENT']:
        # IE cannot handle non-ASCII filenames; fall back to a generic name.
        try:
            original_filename.encode('ascii')
        except UnicodeEncodeError:
            original_filename = 'subtitles.' + format
        filename_header = 'filename=%s' % original_filename
    else:
        # For others like Firefox, we follow RFC2231 (encoding extension in HTTP headers).
        filename_header = 'filename*=UTF-8\'\'%s' % iri_to_uri(
            original_filename.encode('utf-8'))
    response['Content-Disposition'] = 'attachment; ' + filename_header
    return response
def _get_resized_image(page, width):
    # Load the page image, answering a 500 response if the image cannot be
    # created. NOTE(review): the visible portion never uses `width` and has no
    # success-path return — this snippet appears truncated; the resizing logic
    # presumably follows. TODO confirm against the full source.
    try:
        im = _get_image(page)
    except IOError, e:
        # Python 2 `except X, e` syntax — this module is Python 2 only.
        return HttpResponseServerError("Unable to create image: %s" % e)
def handler500(request):
    """Custom 500 handler that reports the id of the last Sentry event."""
    event_id = sentry_sdk.last_event_id()
    body = "Sentry error: %s" % event_id
    return HttpResponseServerError(body)
def medium(request, lccn, date, edition, sequence):
    # Serve a medium-size (550px wide) rendition of a newspaper page.
    # NOTE(review): the visible portion assigns `im` but never returns it —
    # this snippet appears truncated; the response-building code presumably
    # follows. TODO confirm against the full source.
    page = get_page(lccn, date, edition, sequence)
    try:
        im = _get_resized_image(page, 550)
    except IOError, e:
        # Python 2 `except X, e` syntax — this module is Python 2 only.
        return HttpResponseServerError("Unable to create thumbnail: %s" % e)
settings_dict[k] = cleanse_setting(k, getattr(settings, k)) return settings_dict 500 错误应答 def technical_500_response(request, exc_type, exc_value, tb): """ Create a technical server error response. The last three arguments are the values returned from sys.exc_info() and friends. """ reporter = ExceptionReporter(request, exc_type, exc_value, tb) if request.is_ajax(): 如果是 ajax 的请求: text = reporter.get_traceback_text() return HttpResponseServerError(text, content_type='text/plain') else: html = reporter.get_traceback_html() return HttpResponseServerError(html, content_type='text/html') # Cache for the default exception reporter filter instance. default_exception_reporter_filter = None def get_exception_reporter_filter(request): global default_exception_reporter_filter if default_exception_reporter_filter is None: # Load the default filter for the first time and cache it. modpath = settings.DEFAULT_EXCEPTION_REPORTER_FILTER 默认的异常报告过滤器 modname, classname = modpath.rsplit('.', 1) try: mod = import_module(modname) except ImportError as e: raise ImproperlyConfigured(
def message_received(request, backend_name):
    """Handle HTTP requests from Tropo.

    Tropo POSTs JSON that contains either a 'result' (answer to an outbound
    call we initiated) or a 'session' (an inbound event). Callback and
    numberToDial sessions are ones we triggered ourselves and are validated
    against the backend's configured messaging token before being acted on.
    """
    #logger.debug("@@Got request from Tropo: %s" % request)
    if request.method == 'POST':
        logger.debug("@@ Raw data: %s" % request.raw_post_data)
        try:
            post = json.loads(request.raw_post_data)
        except Exception, e:
            # Python 2 `except X, e` syntax — this module is Python 2 only.
            logger.exception(e)
            logger.debug("EXCEPTION decoding post data")
            return HttpResponseServerError()
        logger.debug("@@ Decoded data: %r" % post)
        # Identify the session id. NOTE(review): session_id is assigned but
        # never used in the visible code — possibly used further down, or
        # dead; TODO confirm.
        if 'result' in post:
            session_id = post['result']['sessionId']
        elif 'session' in post:
            session_id = post['session']['id']
        else:
            logger.error(
                "@@HEY, post is neither result nor session, what's going on?")
            return HttpResponseServerError()
        # Do we need to pass this to somebody else?
        if 'result' in post:
            # We never initiate calls that would produce 'result' payloads here.
            logger.debug(
                "@@ results? we don't expect results, only callback users ought to be getting results. Return error."
            )
            return HttpResponseServerError()
        s = post['session']
        if 'parameters' in s:
            parms = s['parameters']
            logger.debug("@@ got session")
            # A couple kinds of requests that are due to our calling Tropo
            # and asking to be called back. We can validate these by looking
            # for our token, which we pass when we call Tropo and Tropo
            # passes back to us now.
            if 'callback_url' in parms or 'numberToDial' in parms:
                # Confirm that the message includes our token (we always
                # include it when we call Tropo for this)
                if 'token' not in parms:
                    logger.error("@@ Got numbertoDial or callback_url "
                                 "request without any token")
                    return HttpResponseBadRequest()
                our_token = settings.INSTALLED_BACKENDS[backend_name]\
                    ['config']['messaging_token']
                if our_token != parms['token']:
                    logger.error("@@ Got wrong token in numberToDial or "
                                 "callback_url request")
                    return HttpResponseBadRequest()
            if 'callback_url' in parms:
                # Dispatch the request to the view that the callback URL
                # resolves to, forwarding the current request object.
                url = parms['callback_url']
                view, args, kwargs = resolve(url)
                kwargs['request'] = request
                logger.debug("@@ passing tropo request to %s" % url)
                try:
                    return view(*args, **kwargs)
                except Exception, e:
                    logger.error("@@Caught exception calling callback:")
                    logger.exception(e)
                    return HttpResponseServerError()
            # Did we call Tropo so we could send a text message?
            if 'numberToDial' in parms:
                # Construct a JSON response telling Tropo to do that
                logger.debug("@@Telling Tropo to send message")
                try:
                    j = json.dumps({
                        "tropo": [
                            {
                                'message': {
                                    'say': {
                                        'value': parms['msg']
                                    },
                                    'to': parms['numberToDial'],
                                    'from': parms['callerID'],
                                    'channel': 'TEXT',
                                    'network': 'SMS'
                                }
                            },
                        ]
                    })
                    logger.debug("@@%s" % j)
                    return HttpResponse(j)
                except Exception, e:
                    logger.exception(e)
                    return HttpResponseServerError()
def gimme_json_for_portfolio(request):
    "Get JSON used to live-update the portfolio editor."
    """JSON includes:
        * The person's data.
        * DataImportAttempts.
        * other stuff"""

    # Since this view is meant to be accessed asynchronously, it doesn't make
    # much sense to decorate it with @login_required, since this will redirect
    # the user to the login page. Not much use if the browser is requesting
    # this page async'ly! So let's use a different method that explicitly warns
    # the user if they're not logged in. At time of writing, this error message
    # is NOT displayed on screen. I suppose someone will see if it they're
    # using Firebug, or accessing the page synchronously.
    if not request.user.is_authenticated():
        return HttpResponseServerError("Oops, you're not logged in.")

    person = request.user.get_profile()

    # Citations don't naturally serialize summaries.
    citations = list(Citation.untrashed.filter(portfolio_entry__person=person))
    portfolio_entries_unserialized = PortfolioEntry.objects.filter(
        person=person, is_deleted=False)
    projects_unserialized = [p.project for p in portfolio_entries_unserialized]

    # Serialize citation summaries
    summaries = {}
    for c in citations:
        summaries[c.pk] = render_to_string(
            "profile/portfolio/citation_summary.html", {'citation': c})

    # FIXME: Maybe we can serialize directly to Python objects.
    # fixme: zomg don't recycle variable names for objs of diff types
    # srsly u guys!

    # Only import attempts started in the last five minutes count as "recent"
    # for the progress indicator below.
    five_minutes_ago = datetime.datetime.utcnow() - \
        datetime.timedelta(minutes=5)
    recent_dias = DataImportAttempt.objects.filter(
        person=person, date_created__gt=five_minutes_ago)
    recent_dias_json = simplejson.loads(
        serializers.serialize('json', recent_dias))
    portfolio_entries = simplejson.loads(
        serializers.serialize('json', portfolio_entries_unserialized))
    projects = simplejson.loads(
        serializers.serialize('json', projects_unserialized))
    # FIXME: Don't send like all the flippin projects down the tubes.
    # NOTE: `citations` is rebound here from model objects to serialized
    # dicts — see the fixme above about recycled variable names.
    citations = simplejson.loads(serializers.serialize('json', citations))

    recent_dias_that_are_completed = recent_dias.filter(completed=True)
    import_running = recent_dias.count() > 0 and (
        recent_dias_that_are_completed.count() != recent_dias.count())
    progress_percentage = 100
    if import_running:
        progress_percentage = int(recent_dias_that_are_completed.count() *
                                  100.0 / recent_dias.count())
    import_data = {
        'running': import_running,
        'progress_percentage': progress_percentage,
    }

    # `get_and_delete_messages()` is destructive: each message is delivered
    # to the editor exactly once.
    json = simplejson.dumps({
        'dias': recent_dias_json,
        'import': import_data,
        'citations': citations,
        'portfolio_entries': portfolio_entries,
        'projects': projects,
        'summaries': summaries,
        'messages': request.user.get_and_delete_messages(),
    })

    return HttpResponse(json, mimetype='application/json')
def followed_threads(request, course_key, user_id):
    """
    Ajax-only endpoint retrieving the threads followed by a specific user.
    """
    course = get_course_with_access(request.user, 'load', course_key, check_if_enrolled=True)
    try:
        profiled_user = cc.User(id=user_id, course_id=course_key)
        query_params = {
            'page': 1,
            # more than threads_per_page to show more activities
            'per_page': THREADS_PER_PAGE,
            'sort_key': 'date',
        }
        query_params.update(
            strip_none(
                extract(request.GET, [
                    'page',
                    'sort_key',
                    'flagged',
                    'unread',
                    'unanswered',
                ])))
        try:
            group_id = get_group_id_for_comments_service(request, course_key)
        except ValueError:
            return HttpResponseServerError("Invalid group_id")
        if group_id is not None:
            query_params['group_id'] = group_id
        paginated_results = profiled_user.subscribed_threads(query_params)
        # NOTE: removed leftover debug `print` statements that dumped the
        # paginated results to stdout on every request.
        query_params['page'] = paginated_results.page
        query_params['num_pages'] = paginated_results.num_pages
        user_info = cc.User.from_django_user(request.user).to_dict()
        with function_trace("get_metadata_for_threads"):
            annotated_content_info = utils.get_metadata_for_threads(
                course_key, paginated_results.collection, request.user, user_info)
        if request.is_ajax():
            is_staff = has_permission(request.user, 'openclose_thread', course.id)
            return utils.JsonResponse({
                'annotated_content_info': annotated_content_info,
                'discussion_data': [
                    utils.prepare_content(thread, course_key, is_staff)
                    for thread in paginated_results.collection
                ],
                'page': query_params['page'],
                'num_pages': query_params['num_pages'],
            })
        # TODO remove non-AJAX support, it does not appear to be used and
        # does not appear to work.
        else:
            context = {
                'course': course,
                'user': request.user,
                'django_user': User.objects.get(id=user_id),
                'profiled_user': profiled_user.to_dict(),
                'threads': paginated_results.collection,
                'user_info': user_info,
                'annotated_content_info': annotated_content_info,
                # 'content': content,
            }
            return render_to_response('discussion/user_profile.html', context)
    except User.DoesNotExist:
        raise Http404
def index(req):
    """Render the scheduler index page.

    GET with a cluster selected lists that cluster's scheduled jobs;
    POST dispatches a `zato_action` (create/edit/delete/execute of a job)
    to the matching `_<action>_<job_type>` handler in this module.
    """
    try:
        jobs = []

        # Build a list of schedulers for a given Zato cluster.
        if req.zato.cluster_id and req.method == 'GET':
            # We have a server to pick the schedulers from, try to invoke it now.
            response = req.zato.client.invoke(
                'zato.scheduler.job.get-list',
                {'cluster_id': req.zato.cluster_id})

            if response.has_data:
                for job_elem in response.data:
                    # `job_id` instead of `id` to avoid shadowing the builtin.
                    job_id = job_elem.id
                    name = job_elem.name
                    is_active = job_elem.is_active
                    job_type = job_elem.job_type
                    start_date = job_elem.start_date
                    service_name = job_elem.service_name
                    extra = job_elem.extra
                    job_type_friendly = job_type_friendly_names[job_type]

                    job = Job(job_id, name, is_active, job_type,
                              from_utc_to_user(start_date + '+00:00', req.zato.user_profile),
                              extra, service_name=service_name,
                              job_type_friendly=job_type_friendly)

                    # Build the human-readable definition and, where needed,
                    # the per-type sub-object attached to the job.
                    if job_type == SCHEDULER.JOB_TYPE.ONE_TIME:
                        definition_text = _one_time_job_def(
                            req.zato.user_profile, start_date)

                    elif job_type == SCHEDULER.JOB_TYPE.INTERVAL_BASED:
                        definition_text = _interval_based_job_def(
                            req.zato.user_profile,
                            _get_start_date(job_elem.start_date),
                            job_elem.repeats, job_elem.weeks, job_elem.days,
                            job_elem.hours, job_elem.minutes, job_elem.seconds)

                        weeks = job_elem.weeks or ''
                        days = job_elem.days or ''
                        hours = job_elem.hours or ''
                        minutes = job_elem.minutes or ''
                        seconds = job_elem.seconds or ''
                        repeats = job_elem.repeats or ''

                        ib_job = IntervalBasedJob(None, None, weeks, days,
                                                  hours, minutes, seconds, repeats)
                        job.interval_based = ib_job

                    elif job_type == SCHEDULER.JOB_TYPE.CRON_STYLE:
                        cron_definition = job_elem.cron_definition or ''
                        definition_text = _cron_style_job_def(
                            req.zato.user_profile, start_date, cron_definition)

                        cs_job = CronStyleJob(None, None, cron_definition)
                        job.cron_style = cs_job

                    else:
                        msg = 'Unrecognized job type, name:[{0}], type:[{1}]'.format(
                            name, job_type)
                        logger.error(msg)
                        raise ZatoException(msg)

                    job.definition_text = definition_text
                    jobs.append(job)
            else:
                logger.info('No jobs found, response:[{}]'.format(response))

        if req.method == 'POST':
            action = req.POST.get('zato_action', '')
            if not action:
                msg = 'req.POST contains no [zato_action] parameter.'
                logger.error(msg)
                return HttpResponseServerError(msg)

            job_type = req.POST.get('job_type', '')
            # `execute` is the only action that is job-type agnostic.
            if action != 'execute' and not job_type:
                msg = 'req.POST contains no [job_type] parameter.'
                logger.error(msg)
                return HttpResponseServerError(msg)

            job_name = req.POST['{0}-{1}-name'.format(action, job_type)]

            # Try to match the action and a job type with an action handler..
            handler_name = '_' + action
            if action != 'execute':
                handler_name += '_' + job_type

            handler = globals().get(handler_name)
            if not handler:
                msg = ('No handler found for action [{0}], job_type:[{1}], '
                       'req.POST:[{2}], req.GET:[{3}].'.format(
                           action, job_type, pprint(req.POST), pprint(req.GET)))
                logger.error(msg)
                return HttpResponseServerError(msg)

            # .. invoke the action handler.
            try:
                response = handler(req.zato.client, req.zato.user_profile,
                                   req.zato.cluster, req.POST)
                response = response if response else ''
                if response:
                    response['message'] = _get_success_message(
                        action, job_type, job_name)
                    response = dumps(response)
                return HttpResponse(response, mimetype='application/javascript')
            # The bound exception variable was unused (format_exc() is used
            # instead), and the message was missing a separator between
            # e:[%s] and req.POST:[%s].
            except Exception:
                msg = ('Could not invoke action [%s], job_type:[%s], e:[%s], '
                       'req.POST:[%s], req.GET:[%s]') % (
                           action, job_type, format_exc(),
                           pprint(req.POST), pprint(req.GET))
                logger.error(msg)
                return HttpResponseServerError(msg)

        return_data = {
            'zato_clusters': req.zato.clusters,
            'cluster_id': req.zato.cluster_id,
            'choose_cluster_form': req.zato.choose_cluster_form,
            'jobs': jobs,
            'friendly_names': job_type_friendly_names.items(),
            'create_one_time_form': OneTimeSchedulerJobForm(create_one_time_prefix, req),
            'create_interval_based_form': IntervalBasedSchedulerJobForm(create_interval_based_prefix, req),
            'create_cron_style_form': CronStyleSchedulerJobForm(create_cron_style_prefix, req),
            'edit_one_time_form': OneTimeSchedulerJobForm(edit_one_time_prefix, req),
            'edit_interval_based_form': IntervalBasedSchedulerJobForm(edit_interval_based_prefix, req),
            'edit_cron_style_form': CronStyleSchedulerJobForm(edit_cron_style_prefix, req),
            'sample_dt': get_sample_dt(req.zato.user_profile),
        }
        return_data.update(get_js_dt_format(req.zato.user_profile))

        return TemplateResponse(req, 'zato/scheduler.html', return_data)

    # The outer `try:` had no matching handler in the original, which is a
    # SyntaxError; close it in the same style the function already uses.
    except Exception:
        msg = 'Could not render the scheduler page, e:[{0}]'.format(format_exc())
        logger.error(msg)
        return HttpResponseServerError(msg)
def errorPage(message):
    """Render the 500.html template with the given message as an HTTP 500 response."""
    tmpl = loader.get_template('500.html')
    ctx = Context({'message': message})
    return HttpResponseServerError(tmpl.render(ctx))
else: logger.error('Invalid file descriptor: {0}'.format(fd)) if redis_settings.WS4REDIS_HEARTBEAT: websocket.send(redis_settings.WS4REDIS_HEARTBEAT) except WebSocketError, excpt: logger.warning('WebSocketError: ', exc_info=sys.exc_info()) response = HttpResponse(status=1001, content='Websocket Closed') except UpgradeRequiredError, excpt: logger.info('Websocket upgrade required') response = HttpResponseBadRequest(status=426, content=excpt) except HandshakeError, excpt: logger.warning('HandshakeError: ', exc_info=sys.exc_info()) response = HttpResponseBadRequest(content=excpt) except Exception, excpt: logger.error('Other Exception: ', exc_info=sys.exc_info()) response = HttpResponseServerError(content=excpt) else: response = HttpResponse() if websocket: websocket.close(code=1001, message='Websocket Closed') if hasattr(start_response, 'im_self') and not start_response.im_self.headers_sent: logger.warning('Staring late response on websocket') status_text = STATUS_CODE_TEXT.get(response.status_code, 'UNKNOWN STATUS CODE') status = '{0} {1}'.format(response.status_code, status_text) start_response(force_str(status), response._headers.values()) logger.info('Finish long living response with status code: '.format( response.status_code)) return response