def _addMaterialType(self, text, user):
    """Attach the uploaded file(s) as resources of a material object.

    Creates the material if needed, adds one ``LocalFile`` resource per
    uploaded file, applies protection either per-resource (existing
    material) or on the material itself (newly created), and returns
    ``(material, status, fossilized_resources)``.

    NOTE(review): the ``text`` and ``user`` parameters are not used in the
    visible body — presumably kept for interface compatibility; confirm
    against callers.
    """
    from MaKaC.common.fossilize import fossilize
    from MaKaC.fossils.conference import ILocalFileExtendedFossil, ILinkFossil
    Logger.get('requestHandler').debug('Adding %s - request %s' % (self._uploadType, request))
    mat, newlyCreated = self._getMaterial()
    # if the material still doesn't exist, create it
    if newlyCreated:
        protectedAtResourceLevel = False
    else:
        protectedAtResourceLevel = True
    resources = []
    # only file uploads are supported by this handler
    assert self._uploadType == "file"
    for fileEntry in self._files:
        resource = LocalFile()
        resource.setFileName(fileEntry["fileName"])
        resource.setFilePath(fileEntry["filePath"])
        resource.setDescription(self._description)
        # fall back to the physical file name when no display name was given
        if self._displayName == "":
            resource.setName(resource.getFileName())
        else:
            resource.setName(self._displayName)
        resources.append(resource)
    status = "OK"
    info = resources
    # forcedFileId - in case there is a conflict, use the file that is
    # already stored
    repoIDs = []
    for i, resource in enumerate(resources):
        if self._repositoryIds:
            mat.addResource(resource, forcedFileId=self._repositoryIds[i])
        else:
            mat.addResource(resource, forcedFileId=None)
        # store the repo id, for files
        if isinstance(resource, LocalFile) and self._repositoryIds is None:
            repoIDs.append(resource.getRepositoryId())
        # protect each resource individually for pre-existing materials;
        # otherwise protect (and configure) the material as a whole
        if protectedAtResourceLevel:
            protectedObject = resource
        else:
            protectedObject = mat
            mat.setHidden(self._visibility)
            mat.setAccessKey(self._password)
        protectedObject.setProtection(self._statusSelection)
        for principal in map(principal_from_fossil, self._userList):
            protectedObject.grantAccess(principal)
    # remember the repository ids assigned on this first upload
    if self._repositoryIds is None:
        self._repositoryIds = repoIDs
    return mat, status, fossilize(info, {"MaKaC.conference.Link": ILinkFossil,
                                         "MaKaC.conference.LocalFile": ILocalFileExtendedFossil})
def create_next(cls, registrant, amount, currency, action, provider='_manual', data=None):
    """Build the next payment transaction for *registrant*.

    Returns ``(transaction, double_payment_flag)`` on success, or
    ``(None, None)`` when the status transition rejects the action.
    """
    event = registrant.getConference()
    txn = PaymentTransaction(event_id=event.getId(), registrant_id=registrant.getId(),
                             amount=amount, currency=currency, provider=provider, data=data)
    double_payment = False
    log = Logger.get('payment')
    latest = cls.find_latest_for_registrant(registrant)
    try:
        status = TransactionStatusTransition.next(latest, action, provider)
    except InvalidTransactionStatus as exc:
        log.exception("{}\nData received: {}".format(exc, data))
        return None, None
    except InvalidManualTransactionAction as exc:
        log.exception("Invalid manual action code '{}' on initial status\n"
                      "Data received: {}".format(exc, data))
        return None, None
    except InvalidTransactionAction as exc:
        log.exception("Invalid action code '{}' on initial status\n"
                      "Data received: {}".format(exc, data))
        return None, None
    except IgnoredTransactionAction as exc:
        log.warning("{}\nData received: {}".format(exc, data))
        return None, None
    except DoublePaymentTransaction:
        # a second successful payment: accept it but flag the duplication
        status = TransactionStatus.successful
        double_payment = True
        log.warning("Received successful payment for an already paid registrant")
    txn.status = status
    return txn, double_payment
def _process(self):
    """Refresh a videoconference room from its provider.

    Rooms the provider no longer knows are flagged as deleted; all paths
    redirect back to the VC room management page.
    """
    plugin = self.plugin
    if not plugin.can_manage_vc_rooms(session.user, self.event_new):
        flash(_("You are not allowed to refresh {plugin_name} rooms for this event.").format(
            plugin_name=plugin.friendly_name), 'error')
        return redirect(url_for('.manage_vc_rooms', self.event_new))
    # NOTE(review): log line uses self._conf while the rest of the method
    # uses self.event_new — presumably the same event; confirm.
    Logger.get('modules.vc').info("Refreshing VC room {} from event {}".format(self.vc_room, self._conf))
    try:
        plugin.refresh_room(self.vc_room, self.event_new)
    except VCRoomNotFoundError as err:
        Logger.get('modules.vc').warning("VC room '{}' not found. Setting it as deleted.".format(self.vc_room))
        self.vc_room.status = VCRoomStatus.deleted
        flash(err.message, 'error')
        return redirect(url_for('.manage_vc_rooms', self.event_new))
    flash(_("{plugin_name} room '{room.name}' refreshed").format(
        plugin_name=plugin.friendly_name, room=self.vc_room), 'success')
    return redirect(url_for('.manage_vc_rooms', self.event_new))
def OAuthCheckAccessResource(cls):
    """Validate the OAuth1 credentials of the current request.

    Checks consumer key, token existence, request signature, token/consumer
    pairing and token expiry.

    :return: the valid access token
    :raises OAuthError: 401 for invalid/expired credentials, 400 otherwise
    """
    # Fix: Python 2-only ``except oauth.Error, e`` replaced by ``as e``
    # (works on Python 2.6+ and keeps the file forward-portable).
    from indico.modules.oauth.db import ConsumerHolder, AccessTokenHolder, OAuthServer
    oauth_request = oauth.Request.from_request(request.method, request.base_url, request.headers,
                                               parameters=create_flat_args())
    Logger.get('oauth.resource').info(oauth_request)
    try:
        now = nowutc()
        consumer_key = oauth_request.get_parameter('oauth_consumer_key')
        if not ConsumerHolder().hasKey(consumer_key):
            raise OAuthError('Invalid Consumer Key', 401)
        consumer = ConsumerHolder().getById(consumer_key)
        token = oauth_request.get_parameter('oauth_token')
        if not token or not AccessTokenHolder().hasKey(token):
            raise OAuthError('Invalid Token', 401)
        access_token = AccessTokenHolder().getById(token)
        oauth_consumer = oauth.Consumer(consumer.getId(), consumer.getSecret())
        # raises oauth.Error on a bad signature (handled below)
        OAuthServer.getInstance().verify_request(oauth_request, oauth_consumer, access_token.getToken())
        if access_token.getConsumer().getId() != oauth_consumer.key:
            raise OAuthError('Invalid Consumer Key', 401)
        elif (now - access_token.getTimestamp()) > timedelta(seconds=Config.getInstance().getOAuthAccessTokenTTL()):
            raise OAuthError('Expired Token', 401)
        return access_token
    except oauth.Error as e:
        if e.message.startswith("Invalid Signature"):
            raise OAuthError("Invalid Signature", 401)
        else:
            raise OAuthError(e.message, 400)
def _send(msgData):
    """Deliver *msgData* through the configured SMTP server.

    Expects a dict with ``fromAddr``, the recipient sets ``toList``,
    ``ccList`` and ``bccList``, and the raw ``msg``.

    :raises MaKaCError: if STARTTLS, login or a recipient is rejected
    """
    cfg = Config.getInstance()
    connection = smtplib.SMTP(*cfg.getSmtpServer())
    if cfg.getSmtpUseTLS():
        connection.ehlo()
        code, errormsg = connection.starttls()
        if code != 220:
            raise MaKaCError(_("Can't start secure connection to SMTP server: %d, %s") % (code, errormsg))
    if cfg.getSmtpLogin():
        code, errormsg = connection.login(cfg.getSmtpLogin(), cfg.getSmtpPassword())
        if code != 235:
            raise MaKaCError(_("Can't login on SMTP server: %d, %s") % (code, errormsg))
    to_addrs = msgData["toList"] | msgData["ccList"] | msgData["bccList"]
    try:
        Logger.get("mail").info(
            "Sending email: To: {} / CC: {} / BCC: {}".format(
                ", ".join(msgData["toList"]) or "None",
                ", ".join(msgData["ccList"]) or "None",
                ", ".join(msgData["bccList"]) or "None",
            )
        )
        connection.sendmail(msgData["fromAddr"], to_addrs, msgData["msg"])
    except smtplib.SMTPRecipientsRefused as e:
        raise MaKaCError("Email address is not valid: {}".format(e.recipients))
    finally:
        # always close the connection, even when sending failed
        connection.quit()
    Logger.get("mail").info("Mail sent to {}".format(", ".join(to_addrs)))
def eventTitleChanged(cls, obj, oldTitle, newTitle):
    """Notify the event's booking manager that the event title changed.

    Fix: Python 2-only ``except Exception, e`` replaced by ``as e``.
    The broad except also absorbs the AttributeError raised when no
    booking manager is indexed for the conference (``.get()`` -> None).
    """
    obj = Catalog.getIdx("cs_bookingmanager_conference").get(obj.getConference().getId())
    try:
        obj.notifyTitleChange(oldTitle, newTitle)
    except Exception as e:
        Logger.get('PluginNotifier').error("Exception while trying to access to the title parameters when changing an event title" + str(e))
def strip_ml_tags(in_text):
    """Return *in_text* with all HTML/XML-like tags removed.

    An unterminated tag (a ``<`` with no matching ``>``) discards everything
    from the ``<`` to the end of the string, matching the old behaviour.

    >>> strip_ml_tags("Keep this Text <remove><me /> KEEP </remove> 123")
    'Keep this Text  KEEP  123'
    """
    # Fixes over the previous version:
    #  * the stripped text is now actually returned (the old code built the
    #    result but had no return statement, so it always returned None)
    #  * single O(n) scan instead of repeatedly popping from a list (O(n^2))
    #  * no Python 2-only ``except IndexError, e`` syntax
    chunks = []
    pos = 0
    length = len(in_text)
    while pos < length:
        if in_text[pos] == '<':
            close = in_text.find('>', pos)
            if close == -1:
                # unterminated tag: log it and drop the remainder
                Logger.get('strip_ml_tags').debug("Not found '>' (the end of the html tag)")
                break
            pos = close + 1  # skip past the '>'
        else:
            chunks.append(in_text[pos])
            pos += 1
    return ''.join(chunks)
def isRoomConnected(cls, booking, roomIp="", roomPanoramaUser=""):
    """Query the Ravem API for the connection status of a room.

    Uses the legacy-endpoint lookup when ``roomIp`` is given, otherwise the
    Vidyo-panorama lookup with ``roomPanoramaUser``.

    :return: dict with ``roomName``, ``isConnected`` and ``service`` on
        success, or a ``VidyoError`` when the API call fails or raises.
    """
    try:
        if roomIp != "":
            answer = RavemApi.isLegacyEndpointConnected(roomIp)
        else:
            answer = RavemApi.isVidyoPanoramaConnected(roomPanoramaUser)
        # HTTP failure or an application-level error payload
        if not answer.ok or "error" in answer.json():
            Logger.get('Vidyo').exception("""Evt:%s, booking:%s, Ravem API's isConnected operation not successfull: %s""" % (booking.getConference().getId(), booking.getId(), answer.text))
            return VidyoError("roomCheckFailed", "roomConnected",
                              _("There was a problem obtaining the room status from Vidyo. {0}").format(VidyoTools.getContactSupportText()))
        result = {"roomName": None, "isConnected": False, "service": None}
        answer = answer.json()
        if "result" in answer:
            # only the "videoconference" service carries the room status
            for service in answer.get("result").get("services"):
                if service.get("name", "") == "videoconference":
                    result["roomName"] = VidyoTools.recoverVidyoName(service.get("event_name"))
                    result["isConnected"] = service.get("status") == 1
                    result["service"] = VidyoTools.recoverVidyoDescription(service.get("event_type"))
        return result
    except Exception:
        # any unexpected failure is reported as a generic room-check error
        return VidyoError("roomCheckFailed", "roomConnected",
                          _("There was a problem obtaining the room status from Vidyo. {0}").format(
                              VidyoTools.getContactSupportText()))
def process():
    """Handle a JSON-RPC 1.1 request and return a Flask JSON response.

    Parses the request body, dispatches to ``ServiceRunner`` and fossilizes
    any ``CausedError`` into the response's ``error`` field instead of
    letting it propagate.
    """
    responseBody = {
        'version': '1.1',
        'error': None,
        'result': None
    }
    requestBody = None
    try:
        # init/clear fossil cache
        clearCache()
        # read request
        try:
            requestBody = request.get_json()
            Logger.get('rpc').info('json rpc request. request: {0}'.format(requestBody))
        except BadRequest:
            raise RequestError('ERR-R1', 'Invalid mime-type.')
        if not requestBody:
            raise RequestError('ERR-R2', 'Empty request.')
        # echo the request id back, per JSON-RPC convention
        if 'id' in requestBody:
            responseBody['id'] = requestBody['id']
        # run request
        responseBody['result'] = ServiceRunner().invokeMethod(str(requestBody['method']),
                                                              requestBody.get('params', []))
    except CausedError as e:
        try:
            errorInfo = fossilize(e)
        except NonFossilizableException as e2:
            # catch Exceptions that are not registered as Fossils
            # and log them
            errorInfo = {'code': '', 'message': str(e2)}
            Logger.get('dev').exception('Exception not registered as fossil')
        # NoReport errors (i.e. not logged in) shouldn't be logged
        if not isinstance(e, NoReportError):
            Logger.get('rpc').exception('Service request failed. '
                                        'Request text:\r\n{0}\r\n\r\n'.format(requestBody))
            if requestBody:
                params = requestBody.get('params', [])
                # escape user-provided params before they end up in logs/response
                Sanitization._escapeHTML(params)
                errorInfo["requestInfo"] = {
                    'method': str(requestBody['method']),
                    'params': params,
                    'origin': str(requestBody.get('origin', 'unknown'))
                }
                Logger.get('rpc').debug('Arguments: {0}'.format(errorInfo['requestInfo']))
        responseBody['error'] = errorInfo
    try:
        jsonResponse = dumps(responseBody, ensure_ascii=True)
    except UnicodeError:
        Logger.get('rpc').exception('Problem encoding JSON response')
        # This is to avoid exceptions due to old data encodings (based on iso-8859-1)
        responseBody['result'] = fix_broken_obj(responseBody['result'])
        jsonResponse = encode(responseBody)
    return app.response_class(jsonResponse, mimetype='application/json')
def _sendReport(self):
    """Email the collected error report to the configured support address."""
    cfg = Config.getInstance()
    # if no e-mail address was specified, use a default sender
    fromAddr = self._userMail if self._userMail else '*****@*****.**'
    toAddr = cfg.getSupportEmail()
    Logger.get('errorReport').debug('mailing %s' % toAddr)
    subject = "[Indico@%s] Error report" % cfg.getBaseURL()
    details = self._requestInfo or ''
    # pretty-print structured request info for readability
    if isinstance(details, (dict, list)):
        details = pformat(details)
    separator = "-" * 20
    # build the message body
    body = "\n".join([
        separator,
        "Error details\n",
        self._code,
        self._message,
        "Inner error: " + str(self._inner),
        details,
        separator,
    ])
    maildata = {"fromAddr": fromAddr,
                "toList": [toAddr],
                "subject": subject,
                "body": body}
    GenericMailer.send(GenericNotification(maildata))
def to_serializable(self, attr='__public__', converters=None):
    """Serialize the attributes listed in ``getattr(self, attr)`` to a dict.

    Each entry is either an attribute name or a ``(attr_name, output_name)``
    tuple. Callables are invoked, nested ``Serializer`` objects (also inside
    lists/dicts) are serialized recursively, Enums become their name, and
    ``converters`` may map a concrete type to a conversion function.

    :raises IndicoError: if reading or serializing any attribute fails
    """
    serializable = {}
    if converters is None:
        converters = {}
    for k in getattr(self, attr):
        try:
            # entry may rename the output key: (attr_name, output_name)
            if isinstance(k, tuple):
                k, name = k
            else:
                k, name = k, k
            v = getattr(self, k)
            if callable(v):
                # to make it generic, we can get rid of it by properties
                v = v()
            if isinstance(v, Serializer):
                v = v.to_serializable()
            elif isinstance(v, list):
                v = [e.to_serializable() for e in v]
            elif isinstance(v, dict):
                # NOTE: the comprehension's ``k`` shadows the outer loop var
                v = dict((k, vv.to_serializable() if isinstance(vv, Serializer) else vv)
                         for k, vv in v.iteritems())
            elif isinstance(v, Enum):
                v = v.name
            # type-specific conversion hook (exact type match, not isinstance)
            if type(v) in converters:
                v = converters[type(v)](v)
            serializable[name] = v
        except Exception:
            msg = 'Could not retrieve {}.{}.'.format(self.__class__.__name__, k)
            Logger.get('Serializer{}'.format(self.__class__.__name__)).exception(msg)
            raise IndicoError(msg)
    return serializable
def _sendMail(self, operation):
    """ Overloads _sendMail behavior for EVO

    Sends the admin notification for a newly created room and, when the
    owner is an Avatar, the owner-chosen notification. Failures are logged
    and never propagated.
    Fix: Python 2-only ``except Exception, e`` replaced by ``as e``.
    """
    if operation == 'new':
        # notification to admin
        try:
            notification = notifications.NewVidyoPublicRoomNotificationAdmin(self)
            GenericMailer.sendAndLog(notification, self.getConference(),
                                     self.getPlugin().getName())
        except Exception as e:
            Logger.get('Vidyo').error(
                """Could not send NewVidyoPublicRoomNotificationAdmin for booking with id %s of event with id %s, exception: %s"""
                % (self.getId(), self.getConference().getId(), str(e)))
        # notification to owner
        if isinstance(self.getOwnerObject(), Avatar):
            try:
                notification = notifications.VidyoOwnerChosenNotification(self)
                GenericMailer.sendAndLog(notification, self.getConference(),
                                         self.getPlugin().getName())
            except Exception as e:
                Logger.get('Vidyo').error(
                    """Could not send VidyoOwnerChosenNotification for booking with id %s of event with id %s, exception: %s"""
                    % (self.getId(), self.getConference().getId(), str(e)))
def sendParticipantsEmail(self, operation):
    """Send notification emails about a WebEx booking.

    Depending on the booking params, mails the attendees, the event
    managers and/or the logged-in creator. Mailing errors are swallowed
    and recorded in ``self._warning``.
    Fixes: ``dict.has_key`` (removed in Python 3) -> ``in``;
    Python 2-only ``except Exception, e`` -> ``as e``.
    """
    params = self.getBookingParams()
    try:
        if 'sendAttendeesEmail' in params and params['sendAttendeesEmail'][0].lower() == 'yes':
            recipients = [self._participants[k]._email for k in self._participants.keys()]
            if len(recipients) > 0:
                if operation == 'remove':
                    notification = WebExParticipantNotification(self, recipients, operation)
                else:
                    notification = WebExParticipantNotification(
                        self, recipients, operation,
                        additionalText="This is a WebEx meeting invitation.<br/><br/>")
                GenericMailer.send(notification)
        if 'sendCreatorEmail' in params and params['sendCreatorEmail'][0].lower() == 'yes':
            recipients = MailTools.getManagersEmailList(self.getConference(), 'WebEx')
            notification = WebExParticipantNotification(
                self, recipients, operation,
                additionalText="Dear event manager:<br/><br/>\n\n ")
            GenericMailer.send(notification)
        if ('sendSelfEmail' in params and params['sendSelfEmail'][0].lower() == 'yes'
                and "loggedInEmail" in params and params["loggedInEmail"] != ""):
            recipients = [params["loggedInEmail"]]
            notification = WebExParticipantNotification(
                self, recipients, operation,
                additionalText="You are receiving this email because you requested it when creating a WebEx booking via Indico.<br/><br/>\n\n ")
            GenericMailer.send(notification)
    except Exception as e:
        Logger.get('WebEx').error(
            """Could not send participant email for booking with id %s of event with id %s, operation %s, exception: %s"""
            % (self.getId(), self.getConference().getId(), operation, str(e)))
        Logger.get('WebEx').error(MailTools.getManagersEmailList(self.getConference(), 'WebEx'))
        self._warning = _("The operation appears to have been successful, however there was an error in sending the emails to participants: %s" % str(e))
def getWebExTimeZoneToUTC(self, tz_id, the_date):
    """Return the GMT offset WebEx reports for *tz_id* on *the_date*.

    The date is required because the WebEx server responds with the offset
    of GMT time based on that date, adjusted for daylight savings, etc.

    :return: the offset as an int, or None if the response is unusable
    """
    # Fixes: the bare ``except:`` was narrowed, and a response missing the
    # ns1:gmtOffset element (previously an uncaught IndexError) is now
    # handled like any other parse failure.
    params = self.getBookingParams()
    request_xml = """<?xml version="1.0" encoding="UTF-8"?>
<serv:message xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
              xmlns:serv="http://www.webex.com/schemas/2002/06/service" >
  <header>
    <securityContext>
      <webExID>%(username)s</webExID>
      <password>%(password)s</password>
      <siteID>%(siteID)s</siteID>
      <partnerID>%(partnerID)s</partnerID>
    </securityContext>
  </header>
  <body>
    <bodyContent xsi:type="site.LstTimeZone" >
      <timeZoneID>%(tz_id)s</timeZoneID>
      <date>%(date)s</date>
    </bodyContent>
  </body>
</serv:message>
""" % {"username": params['webExUser'],
       "password": self.getWebExPass(),
       "siteID": getWebExOptionValueByName("WESiteID"),
       "partnerID": getWebExOptionValueByName("WEPartnerID"),
       "tz_id": tz_id,
       "date": the_date}
    response_xml = sendXMLRequest(request_xml)
    dom = xml.dom.minidom.parseString(response_xml)
    try:
        offset = dom.getElementsByTagName("ns1:gmtOffset")[0].firstChild.toxml('utf-8')
        return int(offset)
    except (IndexError, AttributeError, ValueError, TypeError):
        Logger.get('WebEx').debug("Error requesting time zone offset from WebEx:\n\n%s" % response_xml)
        return None
def create_next(cls, registration, amount, currency, action, provider=None, data=None):
    """Create and attach the next payment transaction for *registration*.

    :return: the new transaction, or None if the transition was rejected
    """
    payment_logger = Logger.get('payment')
    prev = registration.transaction
    transaction = PaymentTransaction(amount=amount, currency=currency,
                                     provider=provider, data=data)
    try:
        new_status = TransactionStatusTransition.next(prev, action, provider)
    except InvalidTransactionStatus as err:
        payment_logger.exception("%s (data received: %r)", err, data)
        return None
    except InvalidManualTransactionAction as err:
        payment_logger.exception("Invalid manual action code '%s' on initial status (data received: %r)",
                                 err, data)
        return None
    except InvalidTransactionAction as err:
        payment_logger.exception("Invalid action code '%s' on initial status (data received: %r)",
                                 err, data)
        return None
    except IgnoredTransactionAction as err:
        payment_logger.warning("%s (data received: %r)", err, data)
        return None
    except DoublePaymentTransaction:
        # the registrant paid twice: record it as successful anyway
        new_status = TransactionStatus.successful
        payment_logger.info("Received successful payment for an already paid registration")
    registration.transaction = transaction
    transaction.status = new_status
    return transaction
def pytest_configure(config):
    """Prepare the Indico test environment: temp dirs, sandboxed config, silent logging."""
    # Load all the plugins defined in pytest_plugins
    config.pluginmanager.consider_module(sys.modules[__name__])
    config.indico_temp_dir = py.path.local(tempfile.mkdtemp(prefix="indicotesttmp."))
    raw_names = re.split(r"[\s,;]+", config.getini("indico_plugins"))
    plugins = [name.strip() for name in raw_names if name.strip()]
    tmp = config.indico_temp_dir.strpath
    # Throw away all indico.conf options early
    overrides = {
        # invalid ports: never connect to a real ZODB, never send real emails
        "DBConnectionParams": ("localhost", 0),
        "SmtpServer": ("localhost", 0),
        "CacheBackend": "null",
        "Loggers": [],
        "UploadedFilesTempDir": tmp,
        "XMLCacheDir": tmp,
        "ArchiveDir": tmp,
        "StorageBackends": {"default": config.indico_temp_dir},
        "AttachmentStorage": "default",
        "Plugins": plugins,
        "SecretKey": os.urandom(16),
    }
    Config.getInstance().reset(overrides)
    # Make sure we don't write any log files (or worse: send emails)
    Logger.reset()
    del logging.root.handlers[:]
    logging.root.addHandler(logging.NullHandler())
    # Silence the annoying pycountry logger
    import pycountry.db
    pycountry.db.logger.addHandler(logging.NullHandler())
def pytest_configure(config):
    """Configure pytest for Indico: sandbox the config and mute all logging."""
    # Load all the plugins defined in pytest_plugins
    config.pluginmanager.consider_module(sys.modules[__name__])
    config.indico_temp_dir = py.path.local(tempfile.mkdtemp(prefix='indicotesttmp.'))
    plugin_names = (x.strip() for x in re.split(r'[\s,;]+', config.getini('indico_plugins')))
    plugins = [p for p in plugin_names if p]
    temp_path = config.indico_temp_dir.strpath
    # Throw away all indico.conf options early
    Config.getInstance().reset({
        'DBConnectionParams': ('localhost', 0),  # invalid port - just so we never connect to a real ZODB!
        'SmtpServer': ('localhost', 0),  # invalid port - just in case so we NEVER send emails!
        'CacheBackend': 'null',
        'Loggers': [],
        'UploadedFilesTempDir': temp_path,
        'XMLCacheDir': temp_path,
        'ArchiveDir': temp_path,
        'StorageBackends': {'default': config.indico_temp_dir},
        'AttachmentStorage': 'default',
        'Plugins': plugins,
        'SecretKey': os.urandom(16)
    })
    # Make sure we don't write any log files (or worse: send emails)
    Logger.reset()
    del logging.root.handlers[:]
    logging.root.addHandler(logging.NullHandler())
    # Silence the annoying pycountry logger
    import pycountry.db
    pycountry.db.logger.addHandler(logging.NullHandler())
def _run(self):
    """Generate the offline website for the task's event.

    Sets up a fake request handler/context, then builds the site zip; on
    failure the task status is set to "Failed" and the error is logged.
    Fixes: Python 2-only ``except Exception, e`` -> ``as e``; the log
    message's broken backslash continuation (which embedded stray
    whitespace in the message) was normalized.
    """
    Logger.get('OfflineEventGeneratorTask').info("Started generation of the offline website for task: %s" %
                                                 self._task.id)
    setLocale(self._task.avatar.getLang())
    self._rh = RHCustomizable()
    self._aw = self._rh._aw = AccessWrapper()
    self._rh._conf = self._rh._target = self._task.conference
    ContextManager.set('currentRH', self._rh)
    ContextManager.set('offlineMode', True)
    # Get event type
    wf = self._rh.getWebFactory()
    eventType = wf.getId() if wf else "conference"
    try:
        # NOTE(review): the result is not used in the visible code — the
        # success path presumably continues beyond this chunk.
        websiteZipFile = OfflineEvent(self._rh, self._rh._conf, eventType).create()
    except Exception as e:
        Logger.get('OfflineEventGeneratorTask').exception(
            "Generation of the offline website for task %s failed with message error: %s" % (self._task.id, e))
        self._task.status = "Failed"
        return
def create_next(cls, registration, amount, currency, action, provider=None, data=None):
    """Attach and return the next payment transaction for *registration*.

    :return: ``(transaction, double_payment_flag)`` or ``(None, None)``
        when the status transition rejects the action
    """
    log = Logger.get('payment')
    prev_txn = registration.transaction
    txn = PaymentTransaction(amount=amount, currency=currency, provider=provider, data=data)
    registration.transaction = txn
    is_double = False
    try:
        status = TransactionStatusTransition.next(prev_txn, action, provider)
    except InvalidTransactionStatus as err:
        log.exception("{}\nData received: {}".format(err, data))
        return None, None
    except InvalidManualTransactionAction as err:
        log.exception("Invalid manual action code '{}' on initial status\n"
                      "Data received: {}".format(err, data))
        return None, None
    except InvalidTransactionAction as err:
        log.exception("Invalid action code '{}' on initial status\n"
                      "Data received: {}".format(err, data))
        return None, None
    except IgnoredTransactionAction as err:
        log.warning("{}\nData received: {}".format(err, data))
        return None, None
    except DoublePaymentTransaction:
        # duplicate successful payment: accept but flag it
        status = TransactionStatus.successful
        is_double = True
        log.warning("Received successful payment for an already paid registration")
    txn.status = status
    return txn, is_double
def updateMicalaCDSExport(cls, cds_indico_matches, cds_indico_pending): '''If there are records found in CDS but not yet listed in the micala database as COMPLETE, then update it. cds_indico_matches is a dictionary of key-value pairs { IndicoID1: CDSID1, IndicoID2: CDSID2, ... } cds_indico_pending is a list of IndicoIDs (for whom the CDS export task has been started but not completed).''' # Logger.get('RecMan').debug('in updateMicalaCDSExport()') # debugging: # for matched in cds_indico_matches.keys(): # Logger.get('RecMan').debug('Looping through cds_indico_matches: %s -> %s' % (matched, cds_indico_matches[matched])) # for pending in cds_indico_pending: # Logger.get('RecMan').debug('Looping through cds_indico_pending: %s' % pending) for pending in cds_indico_pending: # Logger.get('RecMan').debug('Looping through cds_indico_pending: %s (and looking up in cds_indico_matches)' % pending) try: newRecord = cds_indico_matches[pending] idMachine = cls.getIdMachine(CollaborationTools.getOptionValue("RecordingManager", "micalaDBMachineName")) idTask = cls.getIdTask(CollaborationTools.getOptionValue("RecordingManager", "micalaDBStatusExportCDS")) idLecture = cls.getIdLecture(pending) cls.reportStatus("COMPLETE", "CDS record: %s" % newRecord, idMachine, idTask, idLecture) # add the CDS record number to the Lectures table resultAssociateCDSRecord = cls.associateCDSRecordToLOID(newRecord, idLecture) if not resultAssociateCDSRecord["success"]: Logger.get('RecMan').error("Unable to update Lectures table in micala database: %s" % resultAssociateCDSRecord["result"]) # this is not currently used: return resultAssociateCDSRecord["result"] except KeyError: # current pending lecture still not found in CDS so do nothing. Logger.get('RecMan').debug('%s listed as pending and not found in cds_indico_matches, so it must still be pending.' % pending)
def findGroupMemberUids(self, group):
    """ Finds uids of users in a group. Depends on groupStyle (SLAPD/ActiveDirectory)

    Bug fix: the SLAPD branch collected uids with ``memberUids.add(...)``
    on a *list*, which raises AttributeError for every member; it now uses
    ``append``.
    """
    Logger.get('auth.ldap').debug('findGroupMemberUids(%s)' % group)
    groupDN = self._findDNOfGroup(group)
    if not groupDN:
        return []
    # In ActiveDirectory users have multivalued attribute 'memberof' with list of groups
    # In SLAPD groups have multivalues attribute 'member' with list of users
    if self.groupStyle == 'ActiveDirectory':
        return self.nestedSearch(groupDN, {})
    elif self.groupStyle == 'SLAPD':
        # read member attribute values from the group object
        members = None
        res = self.l.search_s(groupDN, ldap.SCOPE_BASE)
        for dn, data in res:
            if dn:
                members = data['member']
        if not members:
            return []
        memberUids = []
        for memberDN in members:
            memberuid = LDAPTools.extractUIDFromDN(memberDN)
            if memberuid:
                memberUids.append(memberuid)  # was: memberUids.add() -> AttributeError
        Logger.get('auth.ldap').debug('findGroupMemberUids(%s) returns %s' % (group, memberUids))
        return memberUids
    else:
        raise Exception("Unknown LDAP group style, choices are: SLAPD or ActiveDirectory")
def createIdentity(self, li, avatar):
    """Return an LDAPIdentity for *avatar* if the credentials check out, else None."""
    Logger.get("auth.ldap").info("createIdentity %s (%s %s)" %
                                 (li.getLogin(), avatar.getId(), avatar.getEmail()))
    # only create the identity when the LDAP bind succeeds
    if self.checkLoginPassword(li.getLogin(), li.getPassword()):
        return LDAPIdentity(li.getLogin(), avatar)
    return None
def make_app(set_path=False, db_setup=True, testing=False):
    """Create and configure the Indico Flask application.

    Reuses the app of an already-active application context if there is
    one (with a warning), otherwise builds a fresh one: config, jinja,
    assets, optional DB, URL map, handlers and blueprints.
    """
    # If you are reading this code and wonder how to access the app:
    # >>> from flask import current_app as app
    # This only works while inside an application context but you really shouldn't have any
    # reason to access it outside this method without being inside an application context.
    # When set_path is enabled, SERVER_NAME and APPLICATION_ROOT are set according to BaseURL
    # so URLs can be generated without an app context, e.g. in the indico shell
    if _app_ctx_stack.top:
        Logger.get('flask').warn('make_app({}) called within app context, using existing app:\n{}'.format(
            set_path, '\n'.join(traceback.format_stack())))
        return _app_ctx_stack.top.app
    app = IndicoFlask('indico', static_folder=None, template_folder='web/templates')
    app.config['TESTING'] = testing
    fix_root_path(app)
    configure_app(app, set_path)
    setup_jinja(app)
    # assets need an app context to register themselves
    with app.app_context():
        setup_assets()
    if db_setup:
        configure_db(app)
    extend_url_map(app)
    add_handlers(app)
    add_blueprints(app)
    # legacy URL schemes, only when explicitly enabled
    if app.config['INDICO_COMPAT_ROUTES']:
        add_compat_blueprints(app)
    if not app.config['TESTING']:
        add_plugin_blueprints(app)
    Logger.init_app(app)
    return app
def get(self, key, default=None):
    """Fetch *key* from the cache, returning *default* on a miss."""
    self._connect()
    value = self._client.get(self._makeKey(key))
    # log only whether the lookup hit, never the cached payload itself
    Logger.get('GenericCache/%s' % self._namespace).debug('GET %r -> %r' % (key, value is not None))
    return default if value is None else _NoneValue.restore(value)
def get(self, key, default=None):
    """Look up *key* in the cache; return *default* when it is absent."""
    self._connect()
    raw = self._client.get(self._makeKey(key))
    hit = raw is not None
    Logger.get('cache.generic').debug('GET %s %r (%s)', self._namespace, key,
                                      'HIT' if hit else 'MISS')
    if not hit:
        return default
    # unwrap the sentinel used to distinguish a cached None from a miss
    return _NoneValue.restore(raw)
def _send(msgData):
    """Send a prepared email via SMTP, optionally with TLS and authentication."""
    config = Config.getInstance()
    server = smtplib.SMTP(*config.getSmtpServer())
    if config.getSmtpUseTLS():
        server.ehlo()
        tls_code, tls_err = server.starttls()
        if tls_code != 220:
            raise MaKaCError(_("Can't start secure connection to SMTP server: %d, %s") % (tls_code, tls_err))
    login = config.getSmtpLogin()
    if login:
        auth_code, auth_err = server.login(login, config.getSmtpPassword())
        if auth_code != 235:
            raise MaKaCError(_("Can't login on SMTP server: %d, %s") % (auth_code, auth_err))

    def _fmt(addresses):
        # comma-separated list, or the literal string 'None' when empty
        return ', '.join(addresses) or 'None'

    to_addrs = msgData['toList'] | msgData['ccList'] | msgData['bccList']
    try:
        Logger.get('mail').info('Sending email: To: {} / CC: {} / BCC: {}'.format(
            _fmt(msgData['toList']), _fmt(msgData['ccList']), _fmt(msgData['bccList'])))
        server.sendmail(msgData['fromAddr'], to_addrs, msgData['msg'])
    except smtplib.SMTPRecipientsRefused as e:
        raise MaKaCError('Email address is not valid: {}'.format(e.recipients))
    finally:
        server.quit()
    Logger.get('mail').info('Mail sent to {}'.format(', '.join(to_addrs)))
def _check_version(self, distribution, current_version=None):
    """Compare the installed version of *distribution* against PyPI.

    :return: dict with current/latest version strings and an ``outdated``
        flag, or None when the lookup cannot be completed
    """
    pypi_url = 'https://pypi.org/pypi/{}/json'.format(distribution)
    try:
        response = requests.get(pypi_url)
    except requests.RequestException as exc:
        Logger.get('versioncheck').warning('Version check for %s failed: %s', distribution, exc)
        raise NoReportError.wrap_exc(ServiceUnavailable())
    try:
        data = response.json()
    except ValueError:
        # PyPI did not return JSON; give up silently
        return None
    if current_version is None:
        try:
            current_version = get_distribution(distribution).version
        except DistributionNotFound:
            return None
    current_version = Version(current_version)
    if current_version.is_prerelease:
        # if we are on a prerelease, get the latest one even if it's also a prerelease
        latest_version = Version(data['info']['version'])
    else:
        # if we are stable, get the latest stable version
        stable_releases = [v for v in map(Version, data['releases']) if not v.is_prerelease]
        latest_version = max(stable_releases) if stable_releases else None
    return {'current_version': unicode(current_version),
            'latest_version': unicode(latest_version) if latest_version else None,
            'outdated': (current_version < latest_version) if latest_version else False}
def _process(self):
    """Issue a temporary OAuth request token and return its serialized form."""
    # TODO: Token should have flag authorized=False
    token_key = OAuthUtils.gen_random_string()
    token_secret = OAuthUtils.gen_random_string()
    token = oauth.Token(token_key, token_secret)
    token.set_callback(self._oauth_request.get_parameter('oauth_callback'))
    issued_at = nowutc()
    TempRequestTokenHolder().add(Token(token.key, token, issued_at, self._consumer, None))
    Logger.get('oauth.request_token').info(token.to_string())
    return token.to_string()
def set(self, key, val, ttl=0):
    """Store *val* pickled under *key*; a non-zero *ttl* expires it (seconds).

    Redis errors are logged and swallowed (best-effort cache write).
    """
    payload = pickle.dumps(val)
    try:
        if ttl:
            self._client.setex(key, ttl, payload)
        else:
            self._client.set(key, payload)
    except redis.RedisError:
        Logger.get('cache.redis').exception('set(%r, %r, %r) failed', key, val, ttl)
def unindex(self, entryId):
    """Remove *entryId* from both the string map and the text index.

    Logs an error (without raising) when the entry is unknown.
    Fix: identity comparison ``is not None`` instead of ``!= None``.
    """
    intId = self.getInteger(entryId)
    if intId is not None:
        self.removeString(entryId)
        self._textIdx.unindex_doc(intId)
    else:
        Logger.get('indexes.text').error("No such entry '%s'" % entryId)
def handle_exception(exception):
    """Log an unhandled WSGI exception and render a generic 500 page."""
    Logger.get('wsgi').exception(exception.message or 'WSGI Exception')
    # in debug mode let the exception bubble up to the interactive debugger
    if app.debug:
        raise
    # NOTE: 'ocurred' typo kept — the string is user-facing runtime text
    msg = (str(exception), _("An unexpected error ocurred."))
    return WErrorWSGI(msg).getHTML(), 500
from flask import session from indico.core import signals from indico.core.logger import Logger from indico.modules.attachments.logging import connect_log_signals from indico.modules.attachments.models.attachments import Attachment from indico.modules.attachments.models.folders import AttachmentFolder from indico.modules.attachments.util import can_manage_attachments from indico.util.i18n import _ from indico.web.flask.util import url_for from indico.web.menu import SideMenuItem __all__ = ('logger', 'Attachment', 'AttachmentFolder') logger = Logger.get('attachments') connect_log_signals() @signals.users.merged.connect def _merge_users(target, source, **kwargs): from indico.modules.attachments.models.attachments import Attachment, AttachmentFile from indico.modules.attachments.models.principals import AttachmentFolderPrincipal, AttachmentPrincipal Attachment.query.filter_by(user_id=source.id).update({Attachment.user_id: target.id}) AttachmentFile.query.filter_by(user_id=source.id).update({AttachmentFile.user_id: target.id}) AttachmentPrincipal.merge_users(target, source, 'attachment') AttachmentFolderPrincipal.merge_users(target, source, 'folder') @signals.menu.items.connect_via('event-management-sidemenu') def _extend_event_management_menu(sender, event, **kwargs):
def logger(self):
    """Return the 'db' logger, forced to DEBUG level."""
    from indico.core.logger import Logger
    db_logger = Logger.get('db')
    db_logger.setLevel(logging.DEBUG)
    return db_logger
from indico.core.logger import Logger from indico.util.i18n import _ from indico.web.flask.util import url_for from indico.web.menu import SideMenuItem class IndicoOAuth2Provider(OAuth2Provider): def init_app(self, app): app.config.setdefault('OAUTH2_PROVIDER_ERROR_ENDPOINT', 'oauth.oauth_errors') app.config.setdefault('OAUTH2_PROVIDER_TOKEN_EXPIRES_IN', int(timedelta(days=3650).total_seconds())) app.config.setdefault('OAUTH2_PROVIDER_TOKEN_GENERATOR', lambda req: unicode(uuid4())) super(IndicoOAuth2Provider, self).init_app(app) oauth = IndicoOAuth2Provider() logger = Logger.get('oauth') @signals.menu.items.connect_via('admin-sidemenu') def _extend_admin_menu(sender, **kwargs): if session.user.is_admin: return SideMenuItem('applications', 'Applications', url_for('oauth.apps'), section='integration') @signals.menu.items.connect_via('user-profile-sidemenu') def _extend_profile_sidemenu(sender, user, **kwargs): yield SideMenuItem('applications', _('Applications'), url_for('oauth.user_profile'), 40, disabled=user.is_system) @signals.app_created.connect def _no_ssl_required_on_debug(app, **kwargs):
# You should have received a copy of the GNU General Public License # along with Indico; if not, see <http://www.gnu.org/licenses/>. from persistent import Persistent from MaKaC.common import indexes, mail from MaKaC.common.info import HelperMaKaCInfo from MaKaC.webinterface import urlHandlers from MaKaC.webinterface.mail import GenericNotification from indico.core import signals from indico.core.config import Config from indico.core.logger import Logger from indico.modules.auth.util import url_for_register logger = Logger.get('pending') class PendingHolder(object): """ This is an index that holds all the requests to add pending people to become Indico users. Those participants are not Avatars yet (do not have Indico account) and that's why they are in this pending queue. So once they become Indico users they will be removed from the index""" def __init__(self): """Index by email of all the request and all the tasks with the reminders""" self._id = "" self._idx = None # All the pending users def getPendingByEmail(self, email): return self._idx.matchPendingUser(email)
# This file is part of Indico.
# Copyright (C) 2002 - 2021 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.

from indico.core import signals
from indico.core.logger import Logger


#: Module-wide logger for the files package.
logger = Logger.get('files')


@signals.import_tasks.connect
def _import_tasks(sender, **kwargs):
    # Imported only for its side effect of registering the Celery tasks.
    import indico.modules.files.tasks  # noqa: F401
from flask import session from indico.core import signals from indico.core.logger import Logger from indico.modules.events.agreements.base import AgreementPersonInfo, AgreementDefinitionBase from indico.modules.events.agreements.models.agreements import Agreement from indico.modules.events.agreements.placeholders import PersonNamePlaceholder, AgreementLinkPlaceholder from indico.modules.events.agreements.util import get_agreement_definitions from indico.util.i18n import _ from indico.web.flask.util import url_for from indico.web.menu import SideMenuItem __all__ = ('AgreementPersonInfo', 'AgreementDefinitionBase') logger = Logger.get('agreements') @signals.app_created.connect def _check_agreement_definitions(app, **kwargs): # This will raise RuntimeError if the agreement definition types are not unique get_agreement_definitions() @signals.menu.items.connect_via('event-management-sidemenu') def _extend_event_management_menu(sender, event, **kwargs): if not get_agreement_definitions(): return if not event.can_manage(session.user): return return SideMenuItem('agreements', _('Agreements'), url_for('agreements.event_agreements', event),
from indico.core import signals from indico.core.config import config from indico.core.logger import Logger from indico.core.settings import SettingsProxy from indico.modules.rb.models.blocking_principals import BlockingPrincipal from indico.modules.rb.models.blockings import Blocking from indico.modules.rb.models.locations import Location from indico.modules.rb.models.reservations import Reservation from indico.modules.rb.models.rooms import Room from indico.modules.rb.util import rb_is_admin from indico.util.i18n import _ from indico.web.flask.util import url_for from indico.web.menu import SideMenuItem, SideMenuSection, TopMenuItem logger = Logger.get('rb') rb_settings = SettingsProxy('roombooking', { 'google_maps_api_key': '', 'assistance_emails': [], 'vc_support_emails': [], 'excluded_categories': [], 'notification_before_days': 2, 'notification_before_days_weekly': 5, 'notification_before_days_monthly': 7, 'notifications_enabled': True, 'booking_limit': 365, 'tileserver_url': '' }, acls={'admin_principals', 'authorized_principals'})
from indico.core import signals from indico.core.logger import Logger from indico.core.permissions import ManagementPermission from indico.modules.events.abstracts.clone import AbstractSettingsCloner from indico.modules.events.abstracts.notifications import ContributionTypeCondition, StateCondition, TrackCondition from indico.modules.events.features.base import EventFeature from indico.modules.events.models.events import Event, EventType from indico.modules.events.timetable.models.breaks import Break from indico.util.i18n import _ from indico.util.placeholders import Placeholder from indico.web.flask.templating import template_hook from indico.web.flask.util import url_for from indico.web.menu import SideMenuItem logger = Logger.get('events.abstracts') @signals.event.updated.connect @signals.event.contribution_created.connect @signals.event.contribution_updated.connect @signals.event.contribution_deleted.connect @signals.event.session_deleted.connect @signals.event.session_updated.connect @signals.event.person_updated.connect @signals.event.times_changed.connect def _clear_boa_cache(sender, obj=None, **kwargs): from indico.modules.events.abstracts.util import clear_boa_cache if isinstance(obj, Break): # breaks do not show up in the BoA return
# Indico is free software; you can redistribute it and/or # modify it under the terms of the MIT License; see the # LICENSE file for more details. from __future__ import unicode_literals from flask import flash, request, session from indico.core import signals from indico.core.logger import Logger from indico.modules.events.settings import EventSettingsProxy from indico.util.i18n import _, ngettext from indico.web.flask.util import url_for from indico.web.menu import SideMenuItem logger = Logger.get('events.features') features_event_settings = EventSettingsProxy('features', {'enabled': None}) @signals.menu.items.connect_via('event-management-sidemenu') def _extend_event_management_menu(sender, event, **kwargs): if not event.can_manage(session.user): return return SideMenuItem('features', _('Features'), url_for('event_features.index', event), section='advanced') @signals.app_created.connect def _check_feature_definitions(app, **kwargs):
from __future__ import unicode_literals from flask import session from indico.core import signals from indico.core.logger import Logger from indico.core.roles import ManagementRole from indico.modules.events import Event from indico.modules.events.models.events import EventType from indico.modules.events.tracks.clone import TrackCloner from indico.modules.events.tracks.models.tracks import Track from indico.util.i18n import _ from indico.web.flask.util import url_for from indico.web.menu import SideMenuItem logger = Logger.get('tracks') @signals.menu.items.connect_via('event-management-sidemenu') def _sidemenu_items(sender, event, **kwargs): if event.type_ == EventType.conference and event.can_manage(session.user): return SideMenuItem('program', _('Programme'), url_for('tracks.manage', event), section='organization') @signals.event.sidemenu.connect def _extend_event_menu(sender, **kwargs): from indico.modules.events.layout.util import MenuEntryData from indico.modules.events.tracks.settings import track_settings
# modify it under the terms of the MIT License; see the # LICENSE file for more details. from flask import flash, session from indico.core import signals from indico.core.logger import Logger from indico.core.permissions import ManagementPermission, check_permissions from indico.modules.events.sessions.models.sessions import Session from indico.modules.events.sessions.util import has_sessions_for_user from indico.modules.events.settings import EventSettingsProxy from indico.util.i18n import _, ngettext from indico.web.flask.util import url_for from indico.web.menu import SideMenuItem logger = Logger.get('events.sessions') session_settings = EventSettingsProxy( 'sessions', { # Whether session coordinators can manage contributions inside their sessions 'coordinators_manage_contributions': False, # Whether session coordinators can manage their session blocks 'coordinators_manage_blocks': False }) COORDINATOR_PRIV_SETTINGS = { 'manage-contributions': 'coordinators_manage_contributions', 'manage-blocks': 'coordinators_manage_blocks' } COORDINATOR_PRIV_TITLES = { 'manage-contributions': _('Contributions'),
from __future__ import unicode_literals

import re
import time
from functools import wraps
from types import GeneratorType

from flask import g

from indico.core.config import config
from indico.core.db import db
from indico.core.logger import Logger
from indico.util.string import to_unicode, truncate


logger = Logger.get('emails')


def email_sender(fn):
    """Decorator that sends whatever email object(s) *fn* returns.

    *fn* may return a single email, a list of emails, a generator of
    emails, or ``None`` (nothing to send).  Falsy entries are skipped.
    """
    @wraps(fn)
    def wrapper(*args, **kwargs):
        result = fn(*args, **kwargs)
        if result is None:
            return
        if isinstance(result, GeneratorType):
            result = list(result)
        elif not isinstance(result, list):
            result = [result]
        for mail in result:
            if mail:
                send_email(mail)
    return wrapper
from indico.core.db.sqlalchemy.principals import PrincipalType from indico.core.logger import Logger from indico.core.permissions import ManagementPermission, check_permissions, get_available_permissions from indico.modules.events.cloning import get_event_cloners from indico.modules.events.logs import EventLogKind, EventLogRealm from indico.modules.events.models.events import Event from indico.modules.events.models.legacy_mapping import LegacyEventMapping from indico.util.i18n import _, ngettext, orig_string from indico.util.string import is_legacy_id from indico.web.flask.templating import template_hook from indico.web.flask.util import url_for from indico.web.menu import SideMenuItem, TopMenuItem, TopMenuSection __all__ = ('Event', 'logger', 'event_management_object_url_prefixes', 'event_object_url_prefixes') logger = Logger.get('events') #: URL prefixes for the various event objects (public area) #: All prefixes are expected to be used inside the '/event/<confId>' #: url space. event_object_url_prefixes = { 'event': [''], 'session': ['/sessions/<int:session_id>'], 'contribution': ['/contributions/<int:contrib_id>'], 'subcontribution': ['/contributions/<int:contrib_id>/subcontributions/<int:subcontrib_id>'] } #: URL prefixes for the various event objects (management area) #: All prefixes are expected to be used inside the '/event/<confId>' #: url space.
from indico.core import signals from indico.core.db import db from indico.core.logger import Logger from indico.core.permissions import ManagementPermission from indico.modules.events import Event from indico.modules.events.features.base import EventFeature from indico.modules.events.layout.util import MenuEntryData from indico.modules.events.models.events import EventType from indico.modules.events.registration.logging import connect_log_signals from indico.modules.events.registration.settings import RegistrationSettingsProxy from indico.util.i18n import _, ngettext from indico.web.flask.templating import template_hook from indico.web.flask.util import url_for from indico.web.menu import SideMenuItem logger = Logger.get('events.registration') connect_log_signals() registration_settings = RegistrationSettingsProxy( 'registrations', { # Whether to merge display forms on the participant list 'merge_registration_forms': True, # Columns to show on the participant list when the registration forms are merged 'participant_list_columns': ['first_name', 'last_name', 'affiliation'], # Order of the forms to show on the participant list 'participant_list_forms': [], # Columns to show for each form on the participant list 'participant_list_form_columns': {} })
from indico.core.logger import Logger from indico.core.notifications import make_email, send_email from indico.core.settings import SettingsProxy from indico.core.settings.converters import EnumConverter from indico.modules.users.ext import ExtraUserPreferences from indico.modules.users.models.settings import UserSetting, UserSettingsProxy from indico.modules.users.models.users import NameFormat, User from indico.util.i18n import _ from indico.web.flask.templating import get_template_module, template_hook from indico.web.flask.util import url_for from indico.web.menu import SideMenuItem, TopMenuItem __all__ = ('ExtraUserPreferences', 'User', 'UserSetting', 'UserSettingsProxy', 'user_settings') logger = Logger.get('users') user_settings = UserSettingsProxy( 'users', { 'lang': None, 'timezone': None, 'force_timezone': False, # always use the user's timezone instead of an event's timezone 'show_future_events': False, 'show_past_events': False, 'name_format': NameFormat.first_last, 'use_previewer_pdf': True, 'synced_fields': None, # None to synchronize all fields, empty set to not synchronize 'suggest_categories':
def handler(prefix, path): path = posixpath.join('/', prefix, path) logger = Logger.get('httpapi') if request.method == 'POST': # Convert POST data to a query string queryParams = list(request.form.lists()) query = urlencode(queryParams, doseq=1) # we only need/keep multiple values so we can properly validate the signature. # the legacy code below expects a dict with just the first value. # if you write a new api endpoint that needs multiple values get them from # ``request.values.getlist()`` directly queryParams = {key: values[0] for key, values in queryParams} else: # Parse the actual query string queryParams = {key: value for key, value in request.args.items()} query = request.query_string.decode() apiKey = get_query_parameter(queryParams, ['ak', 'apikey'], None) cookieAuth = get_query_parameter(queryParams, ['ca', 'cookieauth'], 'no') == 'yes' signature = get_query_parameter(queryParams, ['signature']) timestamp = get_query_parameter(queryParams, ['timestamp'], 0, integer=True) noCache = get_query_parameter(queryParams, ['nc', 'nocache'], 'no') == 'yes' pretty = get_query_parameter(queryParams, ['p', 'pretty'], 'no') == 'yes' onlyPublic = get_query_parameter(queryParams, ['op', 'onlypublic'], 'no') == 'yes' onlyAuthed = get_query_parameter(queryParams, ['oa', 'onlyauthed'], 'no') == 'yes' scope = 'read:legacy_api' if request.method == 'GET' else 'write:legacy_api' oauth_token = None if request.headers.get('Authorization', '').lower().startswith('bearer '): try: oauth_token = require_oauth.acquire_token([scope]) except OAuth2Error as exc: raise BadRequest(f'OAuth error: {exc}') # Get our handler function and its argument and response type hook, dformat = HTTPAPIHook.parseRequest(path, queryParams) if hook is None or dformat is None: raise NotFound # Disable caching if we are not just retrieving data (or the hook requires it) if request.method == 'POST' or hook.NO_CACHE: noCache = True ak = error = result = None ts = int(time.time()) typeMap = {} status_code = 
None is_response = False try: used_session = None if cookieAuth: used_session = session if not used_session.user: # ignore guest sessions used_session = None if apiKey or oauth_token or not used_session: auth_token = None if not oauth_token: # Validate the API key (and its signature) ak, enforceOnlyPublic = checkAK(apiKey, signature, timestamp, path, query) if enforceOnlyPublic: onlyPublic = True # Create an access wrapper for the API key's user user = ak.user if ak and not onlyPublic else None else: # Access Token (OAuth) user = oauth_token.user if not onlyPublic else None # Get rid of API key in cache key if we did not impersonate a user if ak and user is None: cacheKey = normalizeQuery( path, query, remove=('_', 'ak', 'apiKey', 'signature', 'timestamp', 'nc', 'nocache', 'oa', 'onlyauthed', 'access_token')) else: cacheKey = normalizeQuery( path, query, remove=('_', 'signature', 'timestamp', 'nc', 'nocache', 'oa', 'onlyauthed', 'access_token')) if signature: # in case the request was signed, store the result under a different key cacheKey = 'signed_' + cacheKey if auth_token: # if oauth was used, we also make the cache key unique cacheKey = f'oauth-{auth_token.id}_{cacheKey}' else: # We authenticated using a session cookie. # XXX: This is not used anymore within indico and should be removed whenever we rewrite # the code here. 
token = request.headers.get( 'X-CSRF-Token', get_query_parameter(queryParams, ['csrftoken'])) if used_session.csrf_protected and used_session.csrf_token != token: raise HTTPAPIError('Invalid CSRF token', 403) user = used_session.user if not onlyPublic else None cacheKey = normalizeQuery(path, query, remove=('_', 'nc', 'nocache', 'ca', 'cookieauth', 'oa', 'onlyauthed', 'csrftoken')) if user is not None: # We *always* prefix the cache key with the user ID so we never get an overlap between # authenticated and unauthenticated requests cacheKey = f'user-{user.id}_{cacheKey}' else: cacheKey = f'public_{cacheKey}' # Bail out if the user requires authentication but is not authenticated if onlyAuthed and not user: raise HTTPAPIError('Not authenticated', 403) addToCache = not hook.NO_CACHE cacheKey = RE_REMOVE_EXTENSION.sub('', cacheKey) if not noCache: obj = API_CACHE.get(cacheKey) if obj is not None: result, extra, ts, complete, typeMap = obj addToCache = False if result is None: g.current_api_user = user # Perform the actual exporting res = hook(user) if isinstance(res, current_app.response_class): addToCache = False is_response = True result, extra, complete, typeMap = res, {}, True, {} elif isinstance(res, tuple) and len(res) == 4: result, extra, complete, typeMap = res else: result, extra, complete, typeMap = res, {}, True, {} if result is not None and addToCache: ttl = api_settings.get('cache_ttl') if ttl > 0: API_CACHE.set(cacheKey, (result, extra, ts, complete, typeMap), ttl) except HTTPAPIError as e: error = e if e.code: status_code = e.code if result is None and error is None: raise NotFound else: if ak and error is None: # Commit only if there was an API key and no error norm_path, norm_query = normalizeQuery(path, query, remove=('signature', 'timestamp'), separate=True) uri = '?'.join([_f for _f in (norm_path, norm_query) if _f]) ak.register_used(request.remote_addr, uri, not onlyPublic) db.session.commit() else: # No need to commit stuff if we didn't use an 
API key (nothing was written) # XXX do we even need this? db.session.rollback() # Log successful POST api requests if error is None and request.method == 'POST': logger.info('API request: %s?%s', path, query) if is_response: return result serializer = Serializer.create(dformat, query_params=queryParams, pretty=pretty, typeMap=typeMap, **hook.serializer_args) if error: if not serializer.schemaless: # if our serializer has a specific schema (HTML, ICAL, etc...) # use JSON, since it is universal serializer = Serializer.create('json') result = {'message': error.message} elif serializer.encapsulate: result = HTTPAPIResultSchema().dump( HTTPAPIResult(result, path, query, ts, extra)) try: data = serializer(result) response = current_app.make_response(data) content_type = serializer.get_response_content_type() if content_type: response.content_type = content_type if status_code: response.status_code = status_code return response except Exception: logger.exception('Serialization error in request %s?%s', path, query) raise
from __future__ import unicode_literals from flask import flash, session from indico.core import signals from indico.core.logger import Logger from indico.core.roles import ManagementRole, check_roles from indico.modules.events.contributions.contrib_fields import get_contrib_field_types from indico.modules.events.contributions.models.contributions import Contribution from indico.modules.events.contributions.models.fields import ContributionField from indico.util.i18n import _, ngettext from indico.web.flask.util import url_for from indico.web.menu import SideMenuItem logger = Logger.get('events.contributions') @signals.menu.items.connect_via('event-management-sidemenu') def _extend_event_management_menu(sender, event, **kwargs): if not event.can_manage(session.user): return if event.type == 'conference': return SideMenuItem('contributions', _('Contributions'), url_for('contributions.manage_contributions', event), section='organization') @signals.users.merged.connect
from indico.core import signals from indico.core.config import config from indico.core.db import db from indico.core.db.sqlalchemy.core import handle_sqlalchemy_database_error from indico.core.logger import Logger, sentry_set_tags from indico.core.notifications import flush_email_queue, init_email_queue from indico.legacy.common import fossilize from indico.legacy.common.security import Sanitization from indico.util.i18n import _ from indico.util.locators import get_locator from indico.util.signals import values_from_signal from indico.web.flask.util import ResponseUtil, create_flat_args, url_for HTTP_VERBS = {'GET', 'PATCH', 'POST', 'PUT', 'DELETE'} logger = Logger.get('rh') class RH(object): NOT_SANITIZED_FIELDS = frozenset() CSRF_ENABLED = True # require a csrf_token when accessing the RH with anything but GET EVENT_FEATURE = None # require a certain event feature when accessing the RH. See `EventFeature` for details DENY_FRAMES = False # whether to send an X-Frame-Options:DENY header CHECK_HTML = False # whether to run the legacy HTML sanitizer #: A dict specifying how the url should be normalized. #: `args` is a dictionary mapping view args keys to callables #: used to retrieve the expected value for those arguments if they #: are present in the request's view args. #: `locators` is a set of callables returning objects with locators. #: `preserved_args` is a set of view arg names which will always
# # Indico is free software; you can redistribute it and/or # modify it under the terms of the MIT License; see the # LICENSE file for more details. from datetime import timedelta from flask_caching import Cache from flask_caching.backends.rediscache import RedisCache from redis import RedisError from redis import from_url as redis_from_url from indico.core.config import config from indico.core.logger import Logger _logger = Logger.get('cache') class CachedNone: __slots__ = () @classmethod def wrap(cls, value): return cls() if value is None else value @classmethod def unwrap(cls, value, default=None): if value is None: return default elif isinstance(value, cls): return None
from jinja2.filters import do_filesizeformat from indico.core import signals from indico.core.logger import Logger from indico.core.settings.converters import EnumConverter from indico.modules.events.features.base import EventFeature from indico.modules.events.logs import EventLogKind, EventLogRealm from indico.modules.events.models.events import EventType from indico.modules.events.settings import EventSettingsProxy, ThemeSettingsProxy from indico.modules.users import NameFormat from indico.util.i18n import _ from indico.web.flask.util import url_for from indico.web.menu import SideMenuItem logger = Logger.get('events.layout') layout_settings = EventSettingsProxy('layout', { 'is_searchable': True, 'show_nav_bar': True, 'show_social_badges': True, 'name_format': None, 'show_banner': False, 'header_text_color': '', 'header_background_color': '', 'announcement': None, 'show_announcement': False, 'use_custom_css': False, 'theme': None, 'timetable_theme': None, 'timetable_theme_settings': {}, 'use_custom_menu': False,
def _process(self):
    """Handle modification of an existing videoconference room.

    Validates permissions, shows/validates the plugin's form and, on
    submit, pushes the updated data to the VC provider.  Returns JSON
    used by the frontend (either a re-rendered form or a flash-only
    response).
    """
    # Only users the plugin considers VC-room managers may modify rooms.
    if not self.plugin.can_manage_vc_rooms(session.user, self.event):
        flash(
            _('You are not allowed to modify {} rooms for this event.').
            format(self.plugin.friendly_name), 'error')
        raise Forbidden
    form = self.plugin.create_form(
        self.event,
        existing_vc_room=self.vc_room,
        existing_event_vc_room=self.event_vc_room)
    if form.validate_on_submit():
        self.plugin.update_data_vc_room(self.vc_room, form.data)
        # allow_same_room=True: re-associating with the room it already
        # has is fine when editing (unlike when creating).
        event_vc_room = process_vc_room_association(
            self.plugin,
            self.event,
            self.vc_room,
            form,
            event_vc_room=self.event_vc_room,
            allow_same_room=True)
        if not event_vc_room:
            return jsonify_data(flash=False)
        self.vc_room.modified_dt = now_utc()
        try:
            # Push the changes to the actual VC provider.
            self.plugin.update_room(self.vc_room, self.event)
        except VCRoomNotFoundError as err:
            # Room vanished on the provider side: mark it deleted locally.
            Logger.get('modules.vc').warning(
                "VC room %r not found. Setting it as deleted.", self.vc_room)
            self.vc_room.status = VCRoomStatus.deleted
            flash(err.message, 'error')
            return jsonify_data(flash=False)
        except VCRoomError as err:
            # Field-specific provider errors become form errors; anything
            # without a field attribution is re-raised.
            if err.field is None:
                raise
            field = getattr(form, err.field)
            field.errors.append(err.message)
            db.session.rollback()
        else:
            # TODO
            # notify_modified(self.vc_room, self.event, session.user)
            flash(
                _("{plugin_name} room '{room.name}' updated").format(
                    plugin_name=self.plugin.friendly_name,
                    room=self.vc_room), 'success')
        return jsonify_data(flash=False)
    # GET or failed validation: re-render the plugin form.  The 'name'
    # field is skipped since an existing room cannot be renamed here.
    form_html = self.plugin.render_form(plugin=self.plugin,
                                        event=self.event,
                                        form=form,
                                        existing_vc_room=self.vc_room,
                                        skip_fields=form.skip_fields | {'name'})
    return jsonify(html=form_html, js=_pop_injected_js())
from __future__ import unicode_literals from flask import session from indico.core import signals from indico.core.logger import Logger from indico.core.roles import ManagementRole, check_roles from indico.core.settings import SettingsProxy from indico.modules.categories.models.categories import Category from indico.modules.categories.models.legacy_mapping import LegacyCategoryMapping from indico.util.i18n import _ from indico.web.flask.util import url_for from indico.web.menu import SideMenuItem logger = Logger.get('categories') upcoming_events_settings = SettingsProxy('upcoming_events', { 'entries': [], 'max_entries': 10 }) @signals.import_tasks.connect def _import_tasks(sender, **kwargs): import indico.modules.categories.tasks @signals.users.merged.connect def _merge_users(target, source, **kwargs): from indico.modules.categories.models.principals import CategoryPrincipal
def normalize_url(self):
    """Performs URL normalization.

    This uses the :attr:`normalize_url_spec` to check if the URL params
    are what they should be and redirects or fails depending on the
    HTTP method used if it's not the case.

    :return: ``None`` or a redirect response
    """
    if current_app.debug and self.normalize_url_spec is RH.normalize_url_spec:
        # in case of ``class SomeRH(RH, MixinWithNormalization)``
        # the default value from `RH` overwrites the normalization
        # rule from ``MixinWithNormalization``. this is never what
        # the developer wants so we fail if it happens. the proper
        # solution is ``class SomeRH(MixinWithNormalization, RH)``
        cls = next((x for x in inspect.getmro(self.__class__)
                    if (x is not RH and x is not self.__class__ and hasattr(x, 'normalize_url_spec') and
                        getattr(x, 'normalize_url_spec', None) is not RH.normalize_url_spec)),
                   None)
        if cls is not None:
            raise Exception(
                'Normalization rule of {} in {} is overwritten by base RH. Put mixins with class-level '
                'attributes on the left of the base class'.format(cls, self.__class__))
    # Nothing to do if the RH declares no normalization at all.
    if not self.normalize_url_spec or not any(self.normalize_url_spec.itervalues()):
        return
    spec = {
        'args': self.normalize_url_spec.get('args', {}),
        'locators': self.normalize_url_spec.get('locators', set()),
        'preserved_args': self.normalize_url_spec.get('preserved_args', set()),
    }
    # Initialize the new view args with preserved arguments (since those would be lost otherwise)
    new_view_args = {
        k: v
        for k, v in request.view_args.iteritems() if k in spec['preserved_args']
    }
    # Retrieve the expected values for all simple arguments (if they are currently present)
    for key, getter in spec['args'].iteritems():
        if key in request.view_args:
            new_view_args[key] = getter(self)
    # Retrieve the expected values from locators
    for getter in spec['locators']:
        value = getter(self)
        if value is None:
            raise NotFound(
                'The URL contains invalid data. Please go to the previous page and refresh it.'
            )
        new_view_args.update(get_locator(value))
    # Get all default values provided by the url map for the endpoint
    defaults = set(
        itertools.chain.from_iterable(
            r.defaults for r in current_app.url_map.iter_rules(request.endpoint) if r.defaults))

    def _convert(v):
        # some legacy code has numeric ids in the locator data, but still takes
        # string ids in the url rule (usually for confId)
        return unicode(v) if isinstance(v, (int, long)) else v

    provided = {
        k: _convert(v)
        for k, v in request.view_args.iteritems() if k not in defaults
    }
    new_view_args = {k: _convert(v) for k, v in new_view_args.iteritems()}
    # Only act if the canonical args differ from what the request provided.
    if new_view_args != provided:
        if request.method in {'GET', 'HEAD'}:
            # Safe methods get redirected to the canonical URL.
            try:
                return redirect(
                    url_for(
                        request.endpoint,
                        **dict(request.args.to_dict(), **new_view_args)))
            except BuildError as e:
                if current_app.debug:
                    raise
                Logger.get('requestHandler').warn(
                    'BuildError during normalization: %s', e)
                raise NotFound
        else:
            # Unsafe methods cannot be redirected transparently, so fail.
            raise NotFound(
                'The URL contains invalid data. Please go to the previous page and refresh it.'
            )
def process(self, params): if request.method not in HTTP_VERBS: # Just to be sure that we don't get some crappy http verb we don't expect raise BadRequest cfg = Config.getInstance() forced_conflicts, max_retries, profile = cfg.getForceConflicts( ), cfg.getMaxRetries(), cfg.getProfile() profile_name, res, textLog = '', '', [] self._startTime = datetime.now() # clear the context ContextManager.destroy() ContextManager.set('currentRH', self) g.rh = self #redirect to https if necessary if self._checkHttpsRedirect(): return self._responseUtil.make_redirect() if self.EVENT_FEATURE is not None: self._check_event_feature() DBMgr.getInstance().startRequest() textLog.append("%s : Database request started" % (datetime.now() - self._startTime)) Logger.get('requestHandler').info('[pid=%s] Request %s started' % (os.getpid(), request)) is_error_response = False try: for i, retry in enumerate(transaction.attempts(max_retries)): with retry: if i > 0: signals.before_retry.send() try: Logger.get('requestHandler').info( '\t[pid=%s] from host %s' % (os.getpid(), request.remote_addr)) profile_name, res = self._process_retry( params, i, profile, forced_conflicts) signals.after_process.send() if i < forced_conflicts: # raise conflict error if enabled to easily handle conflict error case raise ConflictError transaction.commit() DBMgr.getInstance().endRequest(commit=False) break except (ConflictError, POSKeyError): transaction.abort() import traceback # only log conflict if it wasn't forced if i >= forced_conflicts: Logger.get('requestHandler').warning( 'Conflict in Database! (Request %s)\n%s' % (request, traceback.format_exc())) except ClientDisconnected: transaction.abort() Logger.get('requestHandler').warning( 'Client Disconnected! 
(Request {})'.format( request)) time.sleep(i) self._process_success() except Exception as e: transaction.abort() res = self._getMethodByExceptionName(e)(e) if isinstance(e, HTTPException) and e.response is not None: res = e.response is_error_response = True totalTime = (datetime.now() - self._startTime) textLog.append('{} : Request ended'.format(totalTime)) # log request timing if profile and totalTime > timedelta( 0, 1) and os.path.isfile(profile_name): rep = Config.getInstance().getTempDir() stats = pstats.Stats(profile_name) stats.strip_dirs() stats.sort_stats('cumulative', 'time', 'calls') stats.dump_stats(os.path.join(rep, 'IndicoRequestProfile.log')) output = StringIO.StringIO() sys.stdout = output stats.print_stats(100) sys.stdout = sys.__stdout__ s = output.getvalue() f = file(os.path.join(rep, 'IndicoRequest.log'), 'a+') f.write('--------------------------------\n') f.write('URL : {}\n'.format(request.url)) f.write('{} : start request\n'.format(self._startTime)) f.write('params:{}'.format(params)) f.write('\n'.join(textLog)) f.write(s) f.write('--------------------------------\n\n') f.close() if profile and profile_name and os.path.exists(profile_name): os.remove(profile_name) if self._responseUtil.call: return self._responseUtil.make_call() if is_error_response and isinstance( res, (current_app.response_class, Response)): # if we went through error handling code, responseUtil._status has been changed # so make_response() would fail return res # In case of no process needed, we should return empty string to avoid erroneous output # specially with getVars breaking the JS files. if not self._doProcess or res is None: return self._responseUtil.make_empty() return self._responseUtil.make_response(res)
def logger(cls):
    """Return the logger for this plugin (channel ``plugin.<name>``)."""
    channel = 'plugin.' + cls.name
    return Logger.get(channel)
def logger(cls):
    """Return the logger for this plugin (channel ``plugin.<name>``)."""
    return Logger.get('plugin.%s' % cls.name)
# Indico is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License as # published by the Free Software Foundation; either version 3 of the # License, or (at your option) any later version. # # Indico is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Indico; if not, see <http://www.gnu.org/licenses/>. from __future__ import unicode_literals from indico.core import signals from indico.core.logger import Logger from indico.util.i18n import _ from indico.web.flask.util import url_for from indico.web.menu import SideMenuItem logger = Logger.get('networks') @signals.menu.items.connect_via('admin-sidemenu') def _sidemenu_items(sender, **kwargs): yield SideMenuItem('ip_networks', _('IP Networks'), url_for('networks.manage'), section='security')
def handler(prefix, path):
    """Dispatch an HTTP API request.

    Authenticates the caller (API key + signature, OAuth bearer token, or
    session cookie with CSRF check), resolves the export/import hook for
    *path*, serves the result from the GenericCache when allowed, and maps
    HTTPAPIError codes onto the response status.

    NOTE(review): this span ends inside the ``except HTTPAPIError`` handler;
    the rest of the error/serialization path is outside the visible chunk.
    """
    path = posixpath.join('/', prefix, path)
    ContextManager.destroy()
    clearCache()  # init fossil cache
    logger = Logger.get('httpapi')
    if request.method == 'POST':
        # Convert POST data to a query string
        queryParams = dict((key, value.encode('utf-8')) for key, value in request.form.iteritems())
        query = urllib.urlencode(queryParams)
    else:
        # Parse the actual query string
        queryParams = dict((key, value.encode('utf-8')) for key, value in request.args.iteritems())
        query = request.query_string
    dbi = DBMgr.getInstance()
    dbi.startRequest()
    # Short and long forms of each option are accepted (e.g. 'ak'/'apikey')
    apiKey = get_query_parameter(queryParams, ['ak', 'apikey'], None)
    cookieAuth = get_query_parameter(queryParams, ['ca', 'cookieauth'], 'no') == 'yes'
    signature = get_query_parameter(queryParams, ['signature'])
    timestamp = get_query_parameter(queryParams, ['timestamp'], 0, integer=True)
    noCache = get_query_parameter(queryParams, ['nc', 'nocache'], 'no') == 'yes'
    pretty = get_query_parameter(queryParams, ['p', 'pretty'], 'no') == 'yes'
    onlyPublic = get_query_parameter(queryParams, ['op', 'onlypublic'], 'no') == 'yes'
    onlyAuthed = get_query_parameter(queryParams, ['oa', 'onlyauthed'], 'no') == 'yes'
    # Reads are allowed with a read scope; anything else needs write access
    scope = 'read:legacy_api' if request.method == 'GET' else 'write:legacy_api'
    try:
        oauth_valid, oauth_request = oauth.verify_request([scope])
        if not oauth_valid and oauth_request and oauth_request.error_message != 'Bearer token not found.':
            raise BadRequest('OAuth error: {}'.format(oauth_request.error_message))
        elif g.get('received_oauth_token') and oauth_request.error_message == 'Bearer token not found.':
            raise BadRequest('OAuth error: Invalid token')
    except ValueError:
        # XXX: Dirty hack to workaround a bug in flask-oauthlib that causes it
        # not to properly urlencode request query strings
        # Related issue (https://github.com/lepture/flask-oauthlib/issues/213)
        oauth_valid = False

    # Get our handler function and its argument and response type
    hook, dformat = HTTPAPIHook.parseRequest(path, queryParams)
    if hook is None or dformat is None:
        raise NotFound

    # Disable caching if we are not just retrieving data (or the hook requires it)
    if request.method == 'POST' or hook.NO_CACHE:
        noCache = True

    ak = error = result = None
    ts = int(time.time())
    typeMap = {}
    responseUtil = ResponseUtil()
    try:
        used_session = None
        if cookieAuth:
            used_session = session
            if not used_session.user:  # ignore guest sessions
                used_session = None

        if apiKey or oauth_valid or not used_session:
            if not oauth_valid:
                # Validate the API key (and its signature)
                ak, enforceOnlyPublic = checkAK(apiKey, signature, timestamp, path, query)
                if enforceOnlyPublic:
                    onlyPublic = True
                # Create an access wrapper for the API key's user
                aw = buildAW(ak, onlyPublic)
            else:
                # Access Token (OAuth)
                at = load_token(oauth_request.access_token.access_token)
                aw = buildAW(at, onlyPublic)
            # Get rid of API key in cache key if we did not impersonate a user
            if ak and aw.getUser() is None:
                cacheKey = normalizeQuery(path, query,
                                          remove=('_', 'ak', 'apiKey', 'signature', 'timestamp',
                                                  'nc', 'nocache', 'oa', 'onlyauthed'))
            else:
                cacheKey = normalizeQuery(path, query,
                                          remove=('_', 'signature', 'timestamp', 'nc', 'nocache',
                                                  'oa', 'onlyauthed'))
            if signature:
                # in case the request was signed, store the result under a different key
                cacheKey = 'signed_' + cacheKey
        else:
            # We authenticated using a session cookie.
            if Config.getInstance().getCSRFLevel() >= 2:
                token = request.headers.get('X-CSRF-Token',
                                            get_query_parameter(queryParams, ['csrftoken']))
                if used_session.csrf_protected and used_session.csrf_token != token:
                    raise HTTPAPIError('Invalid CSRF token', 403)
            aw = AccessWrapper()
            if not onlyPublic:
                aw.setUser(used_session.avatar)
            # cache per-user so one user's data is never served to another
            userPrefix = 'user-{}_'.format(used_session.user.id)
            cacheKey = userPrefix + normalizeQuery(path, query,
                                                   remove=('_', 'nc', 'nocache', 'ca', 'cookieauth',
                                                           'oa', 'onlyauthed', 'csrftoken'))

        # Bail out if the user requires authentication but is not authenticated
        if onlyAuthed and not aw.getUser():
            raise HTTPAPIError('Not authenticated', 403)

        addToCache = not hook.NO_CACHE
        cache = GenericCache('HTTPAPI')
        cacheKey = RE_REMOVE_EXTENSION.sub('', cacheKey)
        if not noCache:
            obj = cache.get(cacheKey)
            if obj is not None:
                result, extra, ts, complete, typeMap = obj
                addToCache = False
        if result is None:
            ContextManager.set("currentAW", aw)
            # Perform the actual exporting
            res = hook(aw)
            if isinstance(res, tuple) and len(res) == 4:
                result, extra, complete, typeMap = res
            else:
                # hook returned only the result; fill in the defaults
                result, extra, complete, typeMap = res, {}, True, {}
        if result is not None and addToCache:
            ttl = api_settings.get('cache_ttl')
            cache.set(cacheKey, (result, extra, ts, complete, typeMap), ttl)
    except HTTPAPIError, e:
        error = e
        if e.getCode():
            responseUtil.status = e.getCode()
            if responseUtil.status == 405:
                # Method Not Allowed: advertise the verb that *is* allowed
                responseUtil.headers['Allow'] = 'GET' if request.method == 'POST' else 'POST'
if str(code) == '404': raise EVOException('Indico could not find the EVO Server at ' + getEVOOptionValueByName("httpServerLocation") + "(HTTP error 404)") elif str(code) == '500': raise EVOException( "The EVO server has an internal problem (HTTP error 500)", e) else: raise EVOException( """Problem when Indico tried to contact the EVO Server.\nReason: HTTPError: %s (code = %s, shortMessage = '%s', longMessage = '%s', url = '%s'""" % (str(e), code, shortMessage, longMessage, str(url)), e) except URLError, e: Logger.get('EVO').error( """Evt:%s, booking:%s, request: [%s] triggered exception: %s""" % (eventId, bookingId, str(url), str(e))) if str(e.reason).strip() == 'timed out': raise EVOException("The EVO server is not responding.", e) raise EVOException( 'URLError when contacting the EVO server for action: ' + action + '. Reason="' + str(e.reason) + '"', e) else: #we parse the answer encodingTextStart = '<fmt:requestEncoding value=' encodingTextEnd = '/>' answer = answer.strip() #we parse an eventual encoding specification, like <fmt:requestEncoding value="UTF-8"/> if answer.startswith(encodingTextStart):