def eventTitleChanged(cls, obj, oldTitle, newTitle):
    """Notify the event's collaboration booking manager of a title change.

    Looks up the CSBookingManager for the event in the catalog index and
    forwards the old/new titles. Failures are logged and swallowed so a
    plugin error never breaks the title update itself.
    """
    # fix: don't rebind the `obj` parameter; use a dedicated local name
    manager = Catalog.getIdx("cs_bookingmanager_conference").get(obj.getConference().getId())
    try:
        manager.notifyTitleChange(oldTitle, newTitle)
    except Exception as e:  # fix: Python-2-only "except Exception, e" syntax
        Logger.get('PluginNotifier').error("Exception while trying to access to the title parameters when changing an event title" + str(e))
def to_serializable(self, attr='__public__', converters=None):
    """Build a plain-dict representation of this object.

    Iterates the attribute names listed in ``getattr(self, attr)`` (each
    entry is either a name or a ``(name, alias)`` tuple), resolving
    callables, nested Serializers, lists, dicts and Enums, then applying
    any type-specific converter. Raises IndicoError when a value cannot
    be retrieved.
    """
    if converters is None:
        converters = {}
    result = {}
    for entry in getattr(self, attr):
        key = entry
        try:
            if isinstance(key, tuple):
                key, alias = key
            else:
                alias = key
            value = getattr(self, key)
            if callable(value):  # to make it generic, we can get rid of it by properties
                value = value()
            if isinstance(value, Serializer):
                value = value.to_serializable()
            elif isinstance(value, list):
                value = [item.to_serializable() for item in value]
            elif isinstance(value, dict):
                value = dict((dk, dv.to_serializable() if isinstance(dv, Serializer) else dv)
                             for dk, dv in value.iteritems())
            elif isinstance(value, Enum):
                value = value.name
            if type(value) in converters:
                value = converters[type(value)](value)
            result[alias] = value
        except Exception:
            msg = 'Could not retrieve {}.{}.'.format(self.__class__.__name__, key)
            Logger.get('Serializer{}'.format(self.__class__.__name__)).exception(msg)
            raise IndicoError(msg)
    return result
def _send(msgData):
    """Deliver one email message through the configured SMTP server.

    Optionally upgrades the connection to TLS and authenticates, sends
    the message to the union of the To/CC/BCC lists, and always closes
    the connection. Raises MaKaCError on TLS/login failure or refused
    recipients.
    """
    server = smtplib.SMTP(*Config.getInstance().getSmtpServer())
    if Config.getInstance().getSmtpUseTLS():
        server.ehlo()
        code, errormsg = server.starttls()
        if code != 220:
            raise MaKaCError(_("Can't start secure connection to SMTP server: %d, %s") % (code, errormsg))
    if Config.getInstance().getSmtpLogin():
        login = Config.getInstance().getSmtpLogin()
        password = Config.getInstance().getSmtpPassword()
        code, errormsg = server.login(login, password)
        if code != 235:
            raise MaKaCError(_("Can't login on SMTP server: %d, %s") % (code, errormsg))
    to_addrs = msgData["toList"] | msgData["ccList"] | msgData["bccList"]
    try:
        Logger.get("mail").info(
            "Sending email: To: {} / CC: {} / BCC: {}".format(
                ", ".join(msgData["toList"]) or "None",
                ", ".join(msgData["ccList"]) or "None",
                ", ".join(msgData["bccList"]) or "None",
            )
        )
        server.sendmail(msgData["fromAddr"], to_addrs, msgData["msg"])
    except smtplib.SMTPRecipientsRefused as e:
        raise MaKaCError("Email address is not valid: {}".format(e.recipients))
    finally:
        # close the SMTP connection whether or not sending succeeded
        server.quit()
    Logger.get("mail").info("Mail sent to {}".format(", ".join(to_addrs)))
def createIdentity(self, li, avatar):
    """Return an LDAPIdentity for *avatar* if the credentials check out.

    Returns None when the login/password pair is rejected by the LDAP
    backend.
    """
    Logger.get("auth.ldap").info("createIdentity %s (%s %s)"
                                 % (li.getLogin(), avatar.getId(), avatar.getEmail()))
    if self.checkLoginPassword(li.getLogin(), li.getPassword()):
        return LDAPIdentity(li.getLogin(), avatar)
    return None
def _send(msgData):
    """Send one email via the configured SMTP server.

    Negotiates TLS and logs in when configured, delivers to the union of
    To/CC/BCC, and closes the connection no matter what. Raises
    MaKaCError for TLS/login failures or refused recipients.
    """
    server = smtplib.SMTP(*Config.getInstance().getSmtpServer())
    if Config.getInstance().getSmtpUseTLS():
        server.ehlo()
        code, errormsg = server.starttls()
        if code != 220:
            raise MaKaCError(_("Can't start secure connection to SMTP server: %d, %s") % (code, errormsg))
    if Config.getInstance().getSmtpLogin():
        code, errormsg = server.login(Config.getInstance().getSmtpLogin(),
                                      Config.getInstance().getSmtpPassword())
        if code != 235:
            raise MaKaCError(_("Can't login on SMTP server: %d, %s") % (code, errormsg))
    to_addrs = msgData['toList'] | msgData['ccList'] | msgData['bccList']
    try:
        Logger.get('mail').info('Sending email: To: {} / CC: {} / BCC: {}'.format(
            ', '.join(msgData['toList']) or 'None',
            ', '.join(msgData['ccList']) or 'None',
            ', '.join(msgData['bccList']) or 'None'))
        server.sendmail(msgData['fromAddr'], to_addrs, msgData['msg'])
    except smtplib.SMTPRecipientsRefused as e:
        raise MaKaCError('Email address is not valid: {}'.format(e.recipients))
    finally:
        server.quit()
    Logger.get('mail').info('Mail sent to {}'.format(', '.join(to_addrs)))
def isRoomConnected(cls, booking, roomIp="", roomPanoramaUser=""):
    """Query the RAVEM API for the connection status of a physical room.

    Uses the legacy-endpoint check when *roomIp* is given, otherwise the
    Vidyo-panorama check. Returns a dict with ``roomName``,
    ``isConnected`` and ``service`` keys, or a VidyoError when the
    lookup fails for any reason.
    """
    try:
        if roomIp != "":
            response = RavemApi.isLegacyEndpointConnected(roomIp)
        else:
            response = RavemApi.isVidyoPanoramaConnected(roomPanoramaUser)
        if not response.ok or "error" in response.json():
            Logger.get('Vidyo').exception("""Evt:%s, booking:%s, Ravem API's isConnected operation not successfull: %s""" %
                                          (booking.getConference().getId(), booking.getId(), response.text))
            return VidyoError("roomCheckFailed", "roomConnected",
                              _("There was a problem obtaining the room status from Vidyo. {0}").format(
                                  VidyoTools.getContactSupportText()))
        status = {"roomName": None, "isConnected": False, "service": None}
        payload = response.json()
        if "result" in payload:
            # pick the "videoconference" service entry out of the result list
            for service in payload.get("result").get("services"):
                if service.get("name", "") == "videoconference":
                    status["roomName"] = VidyoTools.recoverVidyoName(service.get("event_name"))
                    status["isConnected"] = service.get("status") == 1
                    status["service"] = VidyoTools.recoverVidyoDescription(service.get("event_type"))
        return status
    except Exception:
        return VidyoError("roomCheckFailed", "roomConnected",
                          _("There was a problem obtaining the room status from Vidyo. {0}").format(
                              VidyoTools.getContactSupportText()))
def OAuthCheckAccessResource(cls):
    """Validate an OAuth-signed request and return its access token.

    Checks consumer key, token existence, request signature, the
    token/consumer pairing and the token's age. Raises OAuthError with
    401 for auth failures and 400 for other oauth errors.
    """
    from indico.modules.oauth.db import ConsumerHolder, AccessTokenHolder, OAuthServer
    oauth_request = oauth.Request.from_request(request.method, request.base_url, request.headers,
                                               parameters=create_flat_args())
    Logger.get('oauth.resource').info(oauth_request)
    try:
        now = nowutc()
        consumer_key = oauth_request.get_parameter('oauth_consumer_key')
        if not ConsumerHolder().hasKey(consumer_key):
            raise OAuthError('Invalid Consumer Key', 401)
        consumer = ConsumerHolder().getById(consumer_key)
        token = oauth_request.get_parameter('oauth_token')
        if not token or not AccessTokenHolder().hasKey(token):
            raise OAuthError('Invalid Token', 401)
        access_token = AccessTokenHolder().getById(token)
        oauth_consumer = oauth.Consumer(consumer.getId(), consumer.getSecret())
        OAuthServer.getInstance().verify_request(oauth_request, oauth_consumer, access_token.getToken())
        if access_token.getConsumer().getId() != oauth_consumer.key:
            raise OAuthError('Invalid Consumer Key', 401)
        elif (now - access_token.getTimestamp()) > timedelta(seconds=Config.getInstance().getOAuthAccessTokenTTL()):
            raise OAuthError('Expired Token', 401)
        return access_token
    except oauth.Error as e:  # fix: Python-2-only "except oauth.Error, e" syntax
        if e.message.startswith("Invalid Signature"):
            raise OAuthError("Invalid Signature", 401)
        else:
            raise OAuthError(e.message, 400)
def create_next(cls, registrant, amount, currency, action, provider='_manual', data=None):
    """Create the follow-up PaymentTransaction for *registrant*.

    Derives the next transaction status from the latest existing
    transaction and *action*. Returns ``(transaction, double_payment)``
    on success, or ``(None, None)`` when the status transition is
    invalid/ignored (the problem is logged).
    """
    event = registrant.getConference()
    transaction = PaymentTransaction(event_id=event.getId(), registrant_id=registrant.getId(),
                                     amount=amount, currency=currency, provider=provider, data=data)
    is_double = False
    latest = cls.find_latest_for_registrant(registrant)
    try:
        status = TransactionStatusTransition.next(latest, action, provider)
    except InvalidTransactionStatus as e:
        Logger.get('payment').exception("{}\nData received: {}".format(e, data))
        return None, None
    except InvalidManualTransactionAction as e:
        Logger.get('payment').exception("Invalid manual action code '{}' on initial status\n"
                                        "Data received: {}".format(e, data))
        return None, None
    except InvalidTransactionAction as e:
        Logger.get('payment').exception("Invalid action code '{}' on initial status\n"
                                        "Data received: {}".format(e, data))
        return None, None
    except IgnoredTransactionAction as e:
        Logger.get('payment').warning("{}\nData received: {}".format(e, data))
        return None, None
    except DoublePaymentTransaction:
        status = TransactionStatus.successful
        is_double = True
        Logger.get('payment').warning("Received successful payment for an already paid registrant")
    transaction.status = status
    return transaction, is_double
def _addMaterialType(self, text, user):
    """Attach the uploaded files to the request's material as resources.

    Creates a LocalFile resource per uploaded file, adds each to the
    material (reusing stored repository ids on conflict), applies the
    protection settings at resource or material level, and returns
    ``(material, status, fossilized_resources)``.
    """
    from MaKaC.common.fossilize import fossilize
    from MaKaC.fossils.conference import ILocalFileExtendedFossil, ILinkFossil
    Logger.get('requestHandler').debug('Adding %s - request %s' % (self._uploadType, request))
    mat, newlyCreated = self._getMaterial()
    # if the material still doesn't exist, create it
    if newlyCreated:
        protectedAtResourceLevel = False
    else:
        protectedAtResourceLevel = True
    resources = []
    # only file uploads are supported by this handler
    assert self._uploadType == "file"
    for fileEntry in self._files:
        resource = LocalFile()
        resource.setFileName(fileEntry["fileName"])
        resource.setFilePath(fileEntry["filePath"])
        resource.setDescription(self._description)
        # fall back to the file name when no display name was provided
        if self._displayName == "":
            resource.setName(resource.getFileName())
        else:
            resource.setName(self._displayName)
        resources.append(resource)
    status = "OK"
    info = resources
    # forcedFileId - in case there is a conflict, use the file that is
    # already stored
    repoIDs = []
    for i, resource in enumerate(resources):
        if self._repositoryIds:
            mat.addResource(resource, forcedFileId=self._repositoryIds[i])
        else:
            mat.addResource(resource, forcedFileId=None)
        # store the repo id, for files
        if isinstance(resource, LocalFile) and self._repositoryIds is None:
            repoIDs.append(resource.getRepositoryId())
        # protect either the individual resource or the whole material
        if protectedAtResourceLevel:
            protectedObject = resource
        else:
            protectedObject = mat
            mat.setHidden(self._visibility)
            mat.setAccessKey(self._password)
        protectedObject.setProtection(self._statusSelection)
        for principal in map(principal_from_fossil, self._userList):
            protectedObject.grantAccess(principal)
    if self._repositoryIds is None:
        self._repositoryIds = repoIDs
    return mat, status, fossilize(info, {"MaKaC.conference.Link": ILinkFossil,
                                         "MaKaC.conference.LocalFile": ILocalFileExtendedFossil})
def findGroupMemberUids(self, group):
    """Find the uids of the users in *group*.

    Depends on groupStyle: with ActiveDirectory, membership is resolved
    via a nested search; with SLAPD, the group object's multivalued
    'member' attribute is read directly. Raises on an unknown style.
    """
    Logger.get('auth.ldap').debug('findGroupMemberUids(%s)' % group)
    groupDN = self._findDNOfGroup(group)
    if not groupDN:
        return []
    # In ActiveDirectory users have multivalued attribute 'memberof' with list of groups
    # In SLAPD groups have multivalues attribute 'member' with list of users
    if self.groupStyle == 'ActiveDirectory':
        return self.nestedSearch(groupDN, {})
    elif self.groupStyle == 'SLAPD':
        # read member attibute values from the group object
        members = None
        res = self.l.search_s(groupDN, ldap.SCOPE_BASE)
        for dn, data in res:
            if dn:
                members = data['member']
        if not members:
            return []
        memberUids = []
        for memberDN in members:
            memberuid = LDAPTools.extractUIDFromDN(memberDN)
            if memberuid:
                # BUG FIX: memberUids is a list; the original called .add()
                # (a set method), which raised AttributeError at runtime
                memberUids.append(memberuid)
        Logger.get('auth.ldap').debug('findGroupMemberUids(%s) returns %s' % (group, memberUids))
        return memberUids
    else:
        raise Exception("Unknown LDAP group style, choices are: SLAPD or ActiveDirectory")
def strip_ml_tags(in_text):
    """
    Description: Removes all HTML/XML-like tags from the input text.
    Inputs: s --> string of text
    Outputs: text string without the tags

    # doctest unit testing framework

    >>> test_text = "Keep this Text <remove><me /> KEEP </remove> 123"
    >>> strip_ml_tags(test_text)
    'Keep this Text  KEEP  123'
    """
    # convert in_text to a mutable object (e.g. list)
    s_list = list(in_text)
    i = 0
    while i < len(s_list):
        # iterate until a left-angle bracket is found
        if s_list[i] == '<':
            try:
                while s_list[i] != '>':
                    # pop everything from the the left-angle bracket until the right-angle bracket
                    s_list.pop(i)
            except IndexError as e:  # fix: Python-2-only "except IndexError,e" syntax
                Logger.get('strip_ml_tags').debug("Not found '>' (the end of the html tag): %s" % e)
                continue
            # pops the right-angle bracket, too
            s_list.pop(i)
        else:
            i = i + 1
    # BUG FIX: the original never returned the cleaned text (implicitly
    # returned None); join the remaining characters back into a string
    return ''.join(s_list)
def getWebExTimeZoneToUTC(self, tz_id, the_date):
    """
    The date is required because the WebEx server responds with the
    offset of GMT time based on that date, adjusted for daylight
    savings, etc.  Returns the offset as an int, or None when the
    response cannot be parsed.
    """
    params = self.getBookingParams()
    request_xml = """<?xml version="1.0" encoding="UTF-8"?>
<serv:message xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:serv="http://www.webex.com/schemas/2002/06/service" >
<header>
<securityContext>
<webExID>%(username)s</webExID>
<password>%(password)s</password>
<siteID>%(siteID)s</siteID>
<partnerID>%(partnerID)s</partnerID>
</securityContext>
</header>
<body>
<bodyContent xsi:type="site.LstTimeZone" >
<timeZoneID>%(tz_id)s</timeZoneID>
<date>%(date)s</date>
</bodyContent>
</body>
</serv:message>
""" % ({"username": params['webExUser'],
        "password": self.getWebExPass(),
        "siteID": getWebExOptionValueByName("WESiteID"),
        "partnerID": getWebExOptionValueByName("WEPartnerID"),
        "tz_id": tz_id,
        "date": the_date})
    response_xml = sendXMLRequest(request_xml)
    dom = xml.dom.minidom.parseString(response_xml)
    offset = dom.getElementsByTagName("ns1:gmtOffset")[0].firstChild.toxml('utf-8')
    try:
        return int(offset)
    except (TypeError, ValueError):
        # fix: a bare "except:" previously swallowed every exception type
        # (including KeyboardInterrupt); only int() conversion failures
        # should fall through to this best-effort log + None
        Logger.get('WebEx').debug("Eror requesting time zone offset from WebEx:\n\n%s" % (response_xml))
        return None
def create_next(cls, registration, amount, currency, action, provider=None, data=None):
    """Create the follow-up PaymentTransaction for *registration*.

    Derives the next status from the registration's current transaction
    and *action*, attaches the new transaction to the registration and
    returns it; returns None when the transition is invalid/ignored
    (the problem is logged).
    """
    prior = registration.transaction
    transaction = PaymentTransaction(amount=amount, currency=currency,
                                     provider=provider, data=data)
    try:
        status = TransactionStatusTransition.next(prior, action, provider)
    except InvalidTransactionStatus as e:
        Logger.get('payment').exception("%s (data received: %r)", e, data)
        return None
    except InvalidManualTransactionAction as e:
        Logger.get('payment').exception("Invalid manual action code '%s' on initial status (data received: %r)",
                                        e, data)
        return None
    except InvalidTransactionAction as e:
        Logger.get('payment').exception("Invalid action code '%s' on initial status (data received: %r)", e, data)
        return None
    except IgnoredTransactionAction as e:
        Logger.get('payment').warning("%s (data received: %r)", e, data)
        return None
    except DoublePaymentTransaction:
        status = TransactionStatus.successful
        Logger.get('payment').info("Received successful payment for an already paid registration")
    registration.transaction = transaction
    transaction.status = status
    return transaction
def make_app(set_path=False, db_setup=True, testing=False):
    """Create and configure the Indico Flask application.

    Returns the existing app (with a warning) when called inside an app
    context; otherwise builds a new one: config, Jinja, assets, optional
    DB setup, URL map extensions, handlers and blueprints.
    """
    # If you are reading this code and wonder how to access the app:
    # >>> from flask import current_app as app
    # This only works while inside an application context but you really shouldn't have any
    # reason to access it outside this method without being inside an application context.
    # When set_path is enabled, SERVER_NAME and APPLICATION_ROOT are set according to BaseURL
    # so URLs can be generated without an app context, e.g. in the indico shell
    if _app_ctx_stack.top:
        Logger.get('flask').warn('make_app({}) called within app context, using existing app:\n{}'.format(
            set_path, '\n'.join(traceback.format_stack())))
        return _app_ctx_stack.top.app
    app = IndicoFlask('indico', static_folder=None, template_folder='web/templates')
    app.config['TESTING'] = testing
    fix_root_path(app)
    configure_app(app, set_path)
    setup_jinja(app)
    # asset setup needs an active application context
    with app.app_context():
        setup_assets()
    if db_setup:
        configure_db(app)
    extend_url_map(app)
    add_handlers(app)
    add_blueprints(app)
    if app.config['INDICO_COMPAT_ROUTES']:
        add_compat_blueprints(app)
    # plugin blueprints are skipped in the test environment
    if not app.config['TESTING']:
        add_plugin_blueprints(app)
    Logger.init_app(app)
    return app
def _run(self):
    """Generate the offline website for the task's event.

    Sets up a fake request context (RH, access wrapper, offline mode),
    resolves the event's web factory and builds the site zip; on
    failure, logs the error and marks the task as "Failed".
    """
    Logger.get('OfflineEventGeneratorTask').info("Started generation of the offline website for task: %s" %
                                                 self._task.id)
    setLocale(self._task.avatar.getLang())
    self._rh = RHCustomizable()
    self._aw = self._rh._aw = AccessWrapper()
    self._rh._conf = self._rh._target = self._task.conference
    ContextManager.set('currentRH', self._rh)
    ContextManager.set('offlineMode', True)
    # Get event type
    wf = self._rh.getWebFactory()
    if wf:
        eventType = wf.getId()
    else:
        eventType = "conference"
    try:
        websiteZipFile = OfflineEvent(self._rh, self._rh._conf, eventType).create()
    except Exception as e:  # fix: Python-2-only "except Exception, e" syntax
        # fix: the original log message used a backslash continuation that
        # embedded stray indentation whitespace inside the string literal
        Logger.get('OfflineEventGeneratorTask').exception("Generation of the offline website for task %s failed "
                                                          "with message error: %s" % (self._task.id, e))
        self._task.status = "Failed"
        return
def get(self, key, default=None):
    """Fetch *key* from the cache, returning *default* on a miss."""
    self._connect()
    raw = self._client.get(self._makeKey(key))
    Logger.get('cache.generic').debug('GET %s %r (%s)', self._namespace, key,
                                      'HIT' if raw is not None else 'MISS')
    return default if raw is None else _NoneValue.restore(raw)
def _check_version(self, distribution, current_version=None):
    """Compare the installed version of *distribution* against PyPI.

    Returns a dict with the current/latest versions and an ``outdated``
    flag, or None when version info cannot be determined. Raises a
    wrapped ServiceUnavailable when PyPI is unreachable.
    """
    url = 'https://pypi.org/pypi/{}/json'.format(distribution)
    try:
        response = requests.get(url)
    except requests.RequestException as exc:
        Logger.get('versioncheck').warning('Version check for %s failed: %s', distribution, exc)
        raise NoReportError.wrap_exc(ServiceUnavailable())
    try:
        data = response.json()
    except ValueError:
        return None
    if current_version is None:
        try:
            current_version = get_distribution(distribution).version
        except DistributionNotFound:
            return None
    current_version = Version(current_version)
    if current_version.is_prerelease:
        # if we are on a prerelease, get the latest one even if it's also a prerelease
        latest = Version(data['info']['version'])
    else:
        # if we are stable, get the latest stable version
        stable = [v for v in map(Version, data['releases']) if not v.is_prerelease]
        latest = max(stable) if stable else None
    return {'current_version': unicode(current_version),
            'latest_version': unicode(latest) if latest else None,
            'outdated': (current_version < latest) if latest else False}
def _sendReport(self):
    """Email the collected error report to the configured support address."""
    cfg = Config.getInstance()
    # if no e-mail address was specified, add a default one
    fromAddr = self._userMail if self._userMail else '*****@*****.**'
    toAddr = Config.getInstance().getSupportEmail()
    Logger.get('errorReport').debug('mailing %s' % toAddr)
    subject = "[Indico@%s] Error report" % cfg.getBaseURL()
    request_info = self._requestInfo or ''
    if isinstance(request_info, (dict, list)):
        request_info = pformat(request_info)
    # build the message body
    body = [
        "-" * 20,
        "Error details\n",
        self._code,
        self._message,
        "Inner error: " + str(self._inner),
        request_info,
        "-" * 20,
    ]
    maildata = {"fromAddr": fromAddr,
                "toList": [toAddr],
                "subject": subject,
                "body": "\n".join(body)}
    GenericMailer.send(GenericNotification(maildata))
def sendParticipantsEmail(self, operation):
    """Send notification emails about *operation* on this WebEx booking.

    Depending on the booking parameters, notifies the attendees, the
    event managers and/or the logged-in creator. Errors are logged and
    surfaced via self._warning instead of propagating.
    """
    params = self.getBookingParams()
    try:
        # fix: dict.has_key() replaced with the `in` operator throughout
        # (has_key is Python-2-only and removed in Python 3)
        if 'sendAttendeesEmail' in params and params['sendAttendeesEmail'][0].lower() == 'yes':
            recipients = []
            for k in self._participants.keys():
                recipients.append(self._participants[k]._email)
            if len(recipients) > 0:
                if operation == 'remove':
                    notification = WebExParticipantNotification(self, recipients, operation)
                    GenericMailer.send(notification)
                else:
                    notification = WebExParticipantNotification(
                        self, recipients, operation,
                        additionalText="This is a WebEx meeting invitation.<br/><br/>")
                    GenericMailer.send(notification)
        if 'sendCreatorEmail' in params and params['sendCreatorEmail'][0].lower() == 'yes':
            recipients = MailTools.getManagersEmailList(self.getConference(), 'WebEx')
            notification = WebExParticipantNotification(
                self, recipients, operation,
                additionalText="Dear event manager:<br/><br/>\n\n ")
            GenericMailer.send(notification)
        if ('sendSelfEmail' in params and params['sendSelfEmail'][0].lower() == 'yes'
                and 'loggedInEmail' in params and params["loggedInEmail"] != ""):
            recipients = [params["loggedInEmail"]]
            notification = WebExParticipantNotification(
                self, recipients, operation,
                additionalText="You are receiving this email because you requested it when creating a WebEx booking via Indico.<br/><br/>\n\n ")
            GenericMailer.send(notification)
    except Exception as e:  # fix: Python-2-only "except Exception,e" syntax
        Logger.get('WebEx').error("""Could not send participant email for booking with id %s of event with id %s, operation %s, exception: %s""" % (self.getId(), self.getConference().getId(), operation, str(e)))
        Logger.get('WebEx').error(MailTools.getManagersEmailList(self.getConference(), 'WebEx'))
        self._warning = _("The operation appears to have been successful, however there was an error in sending the emails to participants: %s" % str(e))
def _process(self):
    """Refresh this VC room after a permission check, then redirect back
    to the VC room management page, flashing the outcome."""
    if not self.plugin.can_manage_vc_rooms(session.user, self.event_new):
        message = _("You are not allowed to refresh {plugin_name} rooms for this event.").format(
            plugin_name=self.plugin.friendly_name)
        flash(message, "error")
        return redirect(url_for(".manage_vc_rooms", self.event_new))
    Logger.get("modules.vc").info("Refreshing VC room {} from event {}".format(self.vc_room, self._conf))
    try:
        self.plugin.refresh_room(self.vc_room, self.event_new)
    except VCRoomNotFoundError as err:
        # the room vanished on the provider side; mark it deleted locally
        Logger.get("modules.vc").warning("VC room '{}' not found. Setting it as deleted.".format(self.vc_room))
        self.vc_room.status = VCRoomStatus.deleted
        flash(err.message, "error")
        return redirect(url_for(".manage_vc_rooms", self.event_new))
    flash(_("{plugin_name} room '{room.name}' refreshed").format(
        plugin_name=self.plugin.friendly_name, room=self.vc_room), "success")
    return redirect(url_for(".manage_vc_rooms", self.event_new))
def process():
    """Handle one JSON-RPC request.

    Decodes the request body, dispatches the requested service method
    via ServiceRunner, and returns a JSON response containing either the
    result or a fossilized error description.
    """
    responseBody = {
        'version': '1.1',
        'error': None,
        'result': None
    }
    requestBody = None
    try:
        # init/clear fossil cache
        clearCache()
        # read request
        try:
            requestBody = request.get_json()
            Logger.get('rpc').info('json rpc request. request: {0}'.format(requestBody))
        except BadRequest:
            raise RequestError('ERR-R1', 'Invalid mime-type.')
        if not requestBody:
            raise RequestError('ERR-R2', 'Empty request.')
        # echo the request id back in the response (JSON-RPC convention)
        if 'id' in requestBody:
            responseBody['id'] = requestBody['id']
        # run request
        responseBody['result'] = ServiceRunner().invokeMethod(str(requestBody['method']),
                                                              requestBody.get('params', []))
    except CausedError as e:
        try:
            errorInfo = fossilize(e)
        except NonFossilizableException as e2:
            # catch Exceptions that are not registered as Fossils
            # and log them
            errorInfo = {'code': '', 'message': str(e2)}
            Logger.get('dev').exception('Exception not registered as fossil')
        # NoReport errors (i.e. not logged in) shouldn't be logged
        if not isinstance(e, NoReportError):
            Logger.get('rpc').exception('Service request failed. '
                                        'Request text:\r\n{0}\r\n\r\n'.format(requestBody))
        if requestBody:
            params = requestBody.get('params', [])
            Sanitization._escapeHTML(params)
            errorInfo["requestInfo"] = {
                'method': str(requestBody['method']),
                'params': params,
                'origin': str(requestBody.get('origin', 'unknown'))
            }
            Logger.get('rpc').debug('Arguments: {0}'.format(errorInfo['requestInfo']))
        responseBody['error'] = errorInfo
    try:
        jsonResponse = dumps(responseBody, ensure_ascii=True)
    except UnicodeError:
        Logger.get('rpc').exception('Problem encoding JSON response')
        # This is to avoid exceptions due to old data encodings (based on iso-8859-1)
        responseBody['result'] = fix_broken_obj(responseBody['result'])
        jsonResponse = encode(responseBody)
    return app.response_class(jsonResponse, mimetype='application/json')
def _sendMail(self, operation):
    """ Overloads _sendMail behavior for EVO

    For a 'new' booking, sends an admin notification and (when the owner
    is an Avatar) an owner notification; mail failures are logged and
    swallowed so the booking itself is not affected.
    """
    if operation == 'new':
        # notification to admin
        try:
            notification = notifications.NewVidyoPublicRoomNotificationAdmin(self)
            GenericMailer.sendAndLog(notification, self.getConference(), self.getPlugin().getName())
        except Exception as e:  # fix: Python-2-only "except Exception, e" syntax
            Logger.get('Vidyo').error("""Could not send NewVidyoPublicRoomNotificationAdmin for booking with id %s of event with id %s, exception: %s""" % (self.getId(), self.getConference().getId(), str(e)))
        # notification to owner
        if isinstance(self.getOwnerObject(), Avatar):
            try:
                notification = notifications.VidyoOwnerChosenNotification(self)
                GenericMailer.sendAndLog(notification, self.getConference(), self.getPlugin().getName())
            except Exception as e:  # fix: Python-2-only "except Exception, e" syntax
                Logger.get('Vidyo').error("""Could not send VidyoOwnerChosenNotification for booking with id %s of event with id %s, exception: %s""" % (self.getId(), self.getConference().getId(), str(e)))
def get(self, key, default=None):
    """Fetch *key* from the cache, returning *default* on a miss."""
    self._connect()
    cached = self._client.get(self._makeKey(key))
    Logger.get('GenericCache/%s' % self._namespace).debug('GET %r -> %r' % (key, cached is not None))
    if cached is None:
        return default
    return _NoneValue.restore(cached)
def updateMicalaCDSExport(cls, cds_indico_matches, cds_indico_pending): '''If there are records found in CDS but not yet listed in the micala database as COMPLETE, then update it. cds_indico_matches is a dictionary of key-value pairs { IndicoID1: CDSID1, IndicoID2: CDSID2, ... } cds_indico_pending is a list of IndicoIDs (for whom the CDS export task has been started but not completed).''' # Logger.get('RecMan').debug('in updateMicalaCDSExport()') # debugging: # for matched in cds_indico_matches.keys(): # Logger.get('RecMan').debug('Looping through cds_indico_matches: %s -> %s' % (matched, cds_indico_matches[matched])) # for pending in cds_indico_pending: # Logger.get('RecMan').debug('Looping through cds_indico_pending: %s' % pending) for pending in cds_indico_pending: # Logger.get('RecMan').debug('Looping through cds_indico_pending: %s (and looking up in cds_indico_matches)' % pending) try: newRecord = cds_indico_matches[pending] idMachine = cls.getIdMachine(CollaborationTools.getOptionValue("RecordingManager", "micalaDBMachineName")) idTask = cls.getIdTask(CollaborationTools.getOptionValue("RecordingManager", "micalaDBStatusExportCDS")) idLecture = cls.getIdLecture(pending) cls.reportStatus("COMPLETE", "CDS record: %s" % newRecord, idMachine, idTask, idLecture) # add the CDS record number to the Lectures table resultAssociateCDSRecord = cls.associateCDSRecordToLOID(newRecord, idLecture) if not resultAssociateCDSRecord["success"]: Logger.get('RecMan').error("Unable to update Lectures table in micala database: %s" % resultAssociateCDSRecord["result"]) # this is not currently used: return resultAssociateCDSRecord["result"] except KeyError: # current pending lecture still not found in CDS so do nothing. Logger.get('RecMan').debug('%s listed as pending and not found in cds_indico_matches, so it must still be pending.' % pending)
def create_next(cls, registration, amount, currency, action, provider=None, data=None):
    """Create the follow-up PaymentTransaction for *registration*.

    Attaches a fresh transaction to the registration, then derives its
    status from the previous transaction and *action*. Returns
    ``(transaction, double_payment)`` on success or ``(None, None)``
    when the transition is invalid/ignored (the problem is logged).
    """
    prior = registration.transaction
    transaction = PaymentTransaction(amount=amount, currency=currency,
                                     provider=provider, data=data)
    registration.transaction = transaction
    is_double = False
    try:
        status = TransactionStatusTransition.next(prior, action, provider)
    except InvalidTransactionStatus as e:
        Logger.get('payment').exception("{}\nData received: {}".format(e, data))
        return None, None
    except InvalidManualTransactionAction as e:
        Logger.get('payment').exception("Invalid manual action code '{}' on initial status\n"
                                        "Data received: {}".format(e, data))
        return None, None
    except InvalidTransactionAction as e:
        Logger.get('payment').exception("Invalid action code '{}' on initial status\n"
                                        "Data received: {}".format(e, data))
        return None, None
    except IgnoredTransactionAction as e:
        Logger.get('payment').warning("{}\nData received: {}".format(e, data))
        return None, None
    except DoublePaymentTransaction:
        status = TransactionStatus.successful
        is_double = True
        Logger.get('payment').warning("Received successful payment for an already paid registration")
    transaction.status = status
    return transaction, is_double
def unindex(self, entryId):
    """Remove *entryId* from the text index.

    Resolves the integer id mapped to the string id, removes the string
    mapping and unindexes the document; logs an error when the entry is
    unknown.
    """
    intId = self.getInteger(entryId)
    if intId is not None:  # fix: identity comparison with None, not "!= None"
        self.removeString(entryId)
        self._textIdx.unindex_doc(intId)
    else:
        Logger.get('indexes.text').error("No such entry '%s'" % entryId)
def _process(self):
    """Issue a temporary OAuth request token and return its string form."""
    # TODO: Token should have flag authorized=False
    token = oauth.Token(OAuthUtils.gen_random_string(), OAuthUtils.gen_random_string())
    token.set_callback(self._oauth_request.get_parameter('oauth_callback'))
    created_at = nowutc()
    TempRequestTokenHolder().add(Token(token.key, token, created_at, self._consumer, None))
    Logger.get('oauth.request_token').info(token.to_string())
    return token.to_string()
def set(self, key, val, ttl=0):
    """Store *val* under *key*, optionally expiring after *ttl* seconds.

    Redis errors are logged and swallowed — cache writes are best-effort.
    """
    payload = pickle.dumps(val)
    try:
        if ttl:
            self._client.setex(key, ttl, payload)
        else:
            self._client.set(key, payload)
    except redis.RedisError:
        Logger.get('cache.redis').exception('set(%r, %r, %r) failed', key, val, ttl)
def set_multi(self, mapping, ttl=0):
    """Store every key/value pair of *mapping*, optionally with a TTL.

    Redis errors are logged and swallowed — cache writes are best-effort.
    """
    try:
        pickled = dict((key, pickle.dumps(value)) for key, value in mapping.iteritems())
        self._client.mset(pickled)
        if ttl:
            # mset has no per-key TTL, so expire each key individually
            for key in mapping:
                self._client.expire(key, ttl)
    except redis.RedisError:
        Logger.get('cache.redis').exception('set_multi(%r, %r) failed', mapping, ttl)
def eventDatesChanged(cls, obj, oldStartDate, oldEndDate, newStartDate, newEndDate):
    """Notify the event's collaboration booking manager of a date change.

    Looks up the CSBookingManager for the event in the catalog index and
    forwards the old/new start and end dates. Failures are logged and
    swallowed so a plugin error never breaks the date update itself.
    """
    # fix: don't rebind the `obj` parameter; use a dedicated local name
    manager = Catalog.getIdx("cs_bookingmanager_conference").get(obj.getConference().getId())
    try:
        manager.notifyEventDateChanges(oldStartDate, newStartDate, oldEndDate, newEndDate)
    except Exception as e:  # fix: Python-2-only "except Exception, e" syntax
        Logger.get('PluginNotifier').error("Exception while trying to access to the date parameters when changing an event date" + str(e))
def strip_ml_tags(in_text):
    """Return *in_text* with every HTML/XML-like tag removed.

    Scans the text left to right; whenever a '<' is found, everything up
    to and including the matching '>' is dropped. An unterminated tag
    (no closing '>') is removed up to the end of the string.

    >>> strip_ml_tags("Keep this Text <remove><me /> KEEP </remove> 123")
    'Keep this Text  KEEP  123'
    """
    chars = list(in_text)
    pos = 0
    while pos < len(chars):
        if chars[pos] != '<':
            pos += 1
            continue
        # consume everything up to the closing angle bracket
        try:
            while chars[pos] != '>':
                del chars[pos]
        except IndexError as e:
            # ran off the end: the tag was never closed
            Logger.get('strip_ml_tags').debug(
                "Not found '>' (the end of the html tag): %s" % e)
            continue
        # drop the '>' itself
        del chars[pos]
    return ''.join(chars)
def queryRoom(cls, booking, roomId):
    """ Searches for room information via the admin api's getRoom function
        and the user api's search function (currently the admin api's
        getRoom is not reliable to retrieve name, description and
        groupName).  Tries to find the room providing the extension as
        query (since only the room name and extension are checked by the
        search op).  Returns None if not found.
    """
    confId = booking.getConference().getId()
    bookingId = booking.getId()
    # NOTE(review): the roomId argument is immediately overwritten with the
    # booking's own room id — confirm callers don't rely on the passed value
    roomId = booking.getRoomId()
    try:
        adminApiRoom = AdminApi.getRoom(roomId)
    except WebFault as e:  # fix: Python-2-only "except WebFault, e" syntax
        faultString = e.fault.faultstring
        if faultString.startswith('Room not found for roomID'):
            return VidyoError("unknownRoom", "checkStatus")
        else:
            Logger.get('Vidyo').exception("""Evt:%s, booking:%s, Admin API's getRoom operation got WebFault: %s""" % (confId, bookingId, e.fault.faultstring))
            raise
def to_serializable(self, attr='__public__', converters=None):
    """Serialize this object into a plain dict.

    Walks the attribute names listed in ``getattr(self, attr)`` (plain
    names or ``(name, alias)`` tuples), resolving callables, nested
    Serializers, lists, dicts and Enums, then applying any type-specific
    converter. Raises IndicoError when a value cannot be retrieved.
    """
    if converters is None:
        converters = {}
    output = {}
    for field in getattr(self, attr):
        name = field
        try:
            if isinstance(name, tuple):
                name, label = name
            else:
                label = name
            item = getattr(self, name)
            if callable(item):  # to make it generic, we can get rid of it by properties
                item = item()
            if isinstance(item, Serializer):
                item = item.to_serializable()
            elif isinstance(item, list):
                item = [element.to_serializable() for element in item]
            elif isinstance(item, dict):
                item = dict((ik, iv.to_serializable() if isinstance(iv, Serializer) else iv)
                            for ik, iv in item.iteritems())
            elif isinstance(item, Enum):
                item = item.name
            if type(item) in converters:
                item = converters[type(item)](item)
            output[label] = item
        except Exception:
            msg = 'Could not retrieve {}.{}.'.format(self.__class__.__name__, name)
            Logger.get('Serializer{}'.format(self.__class__.__name__)).exception(msg)
            raise IndicoError(msg)
    return output
def _process(self):
    """Refresh this VC room after a permission check, then redirect back
    to the VC room management page, flashing the outcome."""
    if not self.plugin.can_manage_vc_rooms(session.user, self.event):
        flash(_('You are not allowed to refresh {plugin_name} rooms for this event.').format(
            plugin_name=self.plugin.friendly_name), 'error')
        return redirect(url_for('.manage_vc_rooms', self.event))
    Logger.get('modules.vc').info("Refreshing VC room %r from event %r", self.vc_room, self.event)
    try:
        self.plugin.refresh_room(self.vc_room, self.event)
    except VCRoomNotFoundError as err:
        # the room vanished on the provider side; mark it deleted locally
        Logger.get('modules.vc').warning("VC room %r not found. Setting it as deleted.", self.vc_room)
        self.vc_room.status = VCRoomStatus.deleted
        flash(err.message, 'error')
        return redirect(url_for('.manage_vc_rooms', self.event))
    flash(_("{plugin_name} room '{room.name}' refreshed").format(
        plugin_name=self.plugin.friendly_name, room=self.vc_room), 'success')
    return redirect(url_for('.manage_vc_rooms', self.event))
def _check_version(self, distribution, current_version=None):
    """Check whether the installed *distribution* is outdated on PyPI.

    Returns a dict with the current/latest versions and an ``outdated``
    flag, or None when version info cannot be determined. Raises a
    wrapped ServiceUnavailable when PyPI is unreachable.
    """
    try:
        response = requests.get('https://pypi.org/pypi/{}/json'.format(distribution))
    except requests.RequestException as exc:
        Logger.get('versioncheck').warning('Version check for %s failed: %s', distribution, exc)
        raise NoReportError.wrap_exc(ServiceUnavailable())
    try:
        data = response.json()
    except ValueError:
        return None
    if current_version is None:
        try:
            current_version = get_distribution(distribution).version
        except DistributionNotFound:
            return None
    current_version = Version(current_version)
    if current_version.is_prerelease:
        # if we are on a prerelease, get the latest one even if it's also a prerelease
        latest_version = Version(data['info']['version'])
    else:
        # if we are stable, get the latest stable version
        stable_versions = [v for v in map(Version, data['releases']) if not v.is_prerelease]
        latest_version = max(stable_versions) if stable_versions else None
    return {
        'current_version': unicode(current_version),
        'latest_version': unicode(latest_version) if latest_version else None,
        'outdated': (current_version < latest_version) if latest_version else False,
    }
def get(self, key):
    """Return the cached value stored under *key*, or ``None``.

    ``None`` is returned when no cache file exists, when the entry has
    expired, or when the file cannot be read or unpickled (the latter
    two cases are logged).  The file is read under a shared lock.
    """
    try:
        path = self._getFilePath(key, False)
        if not os.path.exists(path):
            return None
        # Use a context manager so the handle is closed even if acquiring
        # the shared lock raises; the previous explicit close() was only
        # reached on the load path.
        with open(path, 'rb') as f:
            OSSpecific.lockFile(f, 'LOCK_SH')
            expiry = val = None
            try:
                expiry, val = pickle.load(f)
            finally:
                OSSpecific.lockFile(f, 'LOCK_UN')
        if expiry and time.time() > expiry:
            # Entry expired: treat it as a cache miss.
            return None
    except (IOError, OSError):
        Logger.get('cache.files').exception('Error getting cached value')
        return None
    except (EOFError, pickle.UnpicklingError):
        Logger.get('cache.files').exception('Cached information seems corrupted. Overwriting it.')
        return None
    return val
def wrapper(*args, **kw):
    """Log the exception passed to the wrapped error handler, then answer.

    Closure over ``f``, ``logger_name``, ``logger_message`` and
    ``logging_level`` from the enclosing decorator (defined outside this
    part of the file).  AJAX/JSON clients get a structured JSON error;
    other requests fall through to the wrapped handler.
    """
    # The wrapped handler receives the exception somewhere among its
    # positional or keyword arguments; pick the first one found.
    for e in list(args) + kw.values():
        if isinstance(e, Exception):
            exception = e
            break
    else:
        raise IndicoError('Wrong usage of jsonify_error: No error found in params')
    # 'exception'-level logging already records the traceback, so only
    # append one explicitly for the other logging levels.
    tb = traceback.format_exc() if logging_level != 'exception' else ''
    getattr(Logger.get(logger_name), logging_level)(
        logger_message if logger_message else
        'Request {0} finished with {1}: {2}\n{3}'.format(
            request, exception.__class__.__name__, exception, tb
        ).rstrip())
    if request.is_xhr or request.headers.get('Content-Type') == 'application/json':
        return create_json_error_answer(exception)
    else:
        return f(*args, **kw)
def _process_success(self):
    """Run once-per-request tasks after a successful request.

    Flushes the queued DB operations and pending emails, deletes
    temporary files and executes the redis pipeline (if any), logging
    but swallowing failures so the successful response is not affected.
    """
    Logger.get('requestHandler').info('Request successful')
    # request is successful, now, doing tasks that must be done only once
    try:
        flush_after_commit_queue(True)
        GenericMailer.flushQueue(True)  # send emails
        self._deleteTempFiles()
    except Exception:
        # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
        # are no longer swallowed here.
        Logger.get('mail').exception('Mail sending operation failed')
    # execute redis pipeline if we have one
    if self._redisPipeline:
        try:
            self._redisPipeline.execute()
        except RedisError:
            Logger.get('redis').exception('Could not execute pipeline')
def apply_db_loggers(debug=False):
    """Attach SQLAlchemy engine listeners that log every executed query.

    No-op unless *debug* is true; the ``_loggers_applied`` flag on ``db``
    makes the call idempotent so listeners are never registered twice.
    """
    if not debug or getattr(db, '_loggers_applied', False):
        return
    db._loggers_applied = True
    from indico.core.logger import Logger
    logger = Logger.get('db')
    logger.setLevel(logging.DEBUG)

    @listens_for(Engine, 'before_cursor_execute')
    def before_cursor_execute(conn, cursor, statement, parameters, context, executemany):
        # Remember when the query started so the 'after' hook can time it.
        context._query_start_time = time.time()
        source_line = _get_sql_line()
        if source_line:
            log_msg = 'Start Query:\n {0[file]}:{0[line]} {0[function]}\n\n{1}\n{2}'.format(
                source_line,
                _prettify_sql(statement),
                _prettify_params(parameters) if parameters else ''
            ).rstrip()
        else:
            # UPDATEs can't be traced back to their source since they are executed only on flush
            log_msg = 'Start Query:\n{0}\n{1}'.format(
                _prettify_sql(statement),
                _prettify_params(parameters) if parameters else ''
            ).rstrip()
        logger.debug(log_msg,
                     extra={'sql_log_type': 'start',
                            'sql_source': source_line['items'] if source_line else None,
                            'sql_statement': statement,
                            'sql_verb': statement.split()[0],
                            'sql_params': parameters})

    @listens_for(Engine, 'after_cursor_execute')
    def after_cursor_execute(conn, cursor, statement, parameters, context, executemany):
        # Duration since the matching 'before' hook stored the start time.
        total = time.time() - context._query_start_time
        logger.debug('Query complete; total time: {}'.format(total),
                     extra={'sql_log_type': 'end',
                            'sql_duration': total,
                            'sql_verb': statement.split()[0]})
def getEVOAnswer(action, arguments={}, eventId='', bookingId=''):
    """Send *action* with *arguments* to the EVO server and return its reply.

    *eventId* and *bookingId* are used only for log messages.
    Raises :exc:`EVOException` when the server answers with an HTTP error.
    NOTE(review): ``arguments={}`` is a mutable default — appears to be
    read-only here, but confirm ``getRequestURL`` does not mutate it.
    """
    url = getRequestURL(action, arguments)
    Logger.get('EVO').info("""Evt:%s, booking:%s, sending request to EVO: [%s]""" % (eventId, bookingId, str(url)))
    try:
        answer = urlopen(str(url), timeout=secondsToWait).read(readLimit).strip()  #we remove any whitespaces, etc. We won't read more than 100k characters
        Logger.get('EVO').info("""Evt:%s, booking:%s, got answer (unprocessed): [%s]""" % (eventId, bookingId, str(answer)))
    except HTTPError, e:
        # Map the HTTP status code to the standard short/long descriptions.
        code = e.code
        shortMessage = BaseHTTPRequestHandler.responses[code][0]
        longMessage = BaseHTTPRequestHandler.responses[code][1]
        Logger.get('EVO').error("""Evt:%s, booking:%s, request: [%s] triggered HTTPError: %s (code = %s, shortMessage = '%s', longMessage = '%s'""" % (eventId, bookingId, str(url), str(e), code, shortMessage, longMessage))
        # 404/500 get dedicated messages; anything else gets the generic one.
        if str(code) == '404':
            raise EVOException('Indico could not find the EVO Server at ' + getEVOOptionValueByName("httpServerLocation") + "(HTTP error 404)")
        elif str(code) == '500':
            raise EVOException("The EVO server has an internal problem (HTTP error 500)", e)
        else:
            raise EVOException("""Problem when Indico tried to contact the EVO Server.\nReason: HTTPError: %s (code = %s, shortMessage = '%s', longMessage = '%s', url = '%s'""" % (str(e), code, shortMessage, longMessage, str(url)), e)
def handler(prefix, path):
    """Entry point of the legacy HTTP API.

    Authenticates the request (API key + signature, OAuth bearer token or
    session cookie), dispatches it to the matching ``HTTPAPIHook``, caches
    the result when allowed and serializes it into the requested format.
    """
    path = posixpath.join('/', prefix, path)
    clearCache()  # init fossil cache
    logger = Logger.get('httpapi')
    if request.method == 'POST':
        # Convert POST data to a query string
        queryParams = [(key, [x.encode('utf-8') for x in values]) for key, values in request.form.iterlists()]
        query = urllib.urlencode(queryParams, doseq=1)
        # we only need/keep multiple values so we can properly validate the signature.
        # the legacy code below expects a dict with just the first value.
        # if you write a new api endpoint that needs multiple values get them from
        # ``request.values.getlist()`` directly
        queryParams = {key: values[0] for key, values in queryParams}
    else:
        # Parse the actual query string
        queryParams = dict((key, value.encode('utf-8')) for key, value in request.args.iteritems())
        query = request.query_string
    # Well-known query parameters controlling auth/caching/output.
    apiKey = get_query_parameter(queryParams, ['ak', 'apikey'], None)
    cookieAuth = get_query_parameter(queryParams, ['ca', 'cookieauth'], 'no') == 'yes'
    signature = get_query_parameter(queryParams, ['signature'])
    timestamp = get_query_parameter(queryParams, ['timestamp'], 0, integer=True)
    noCache = get_query_parameter(queryParams, ['nc', 'nocache'], 'no') == 'yes'
    pretty = get_query_parameter(queryParams, ['p', 'pretty'], 'no') == 'yes'
    onlyPublic = get_query_parameter(queryParams, ['op', 'onlypublic'], 'no') == 'yes'
    onlyAuthed = get_query_parameter(queryParams, ['oa', 'onlyauthed'], 'no') == 'yes'
    scope = 'read:legacy_api' if request.method == 'GET' else 'write:legacy_api'
    # Basic auth is not OAuth; only try OAuth validation otherwise.
    if not request.headers.get('Authorization', '').lower().startswith('basic '):
        try:
            oauth_valid, oauth_request = oauth.verify_request([scope])
            if not oauth_valid and oauth_request and oauth_request.error_message != 'Bearer token not found.':
                raise BadRequest('OAuth error: {}'.format(oauth_request.error_message))
            elif g.get('received_oauth_token') and oauth_request.error_message == 'Bearer token not found.':
                raise BadRequest('OAuth error: Invalid token')
        except ValueError:
            # XXX: Dirty hack to workaround a bug in flask-oauthlib that causes it
            # not to properly urlencode request query strings
            # Related issue (https://github.com/lepture/flask-oauthlib/issues/213)
            oauth_valid = False
    else:
        oauth_valid = False
    # Get our handler function and its argument and response type
    hook, dformat = HTTPAPIHook.parseRequest(path, queryParams)
    if hook is None or dformat is None:
        raise NotFound
    # Disable caching if we are not just retrieving data (or the hook requires it)
    if request.method == 'POST' or hook.NO_CACHE:
        noCache = True
    ak = error = result = None
    ts = int(time.time())
    typeMap = {}
    status_code = None
    is_response = False
    try:
        used_session = None
        if cookieAuth:
            used_session = session
            if not used_session.user:  # ignore guest sessions
                used_session = None
        if apiKey or oauth_valid or not used_session:
            if not oauth_valid:
                # Validate the API key (and its signature)
                ak, enforceOnlyPublic = checkAK(apiKey, signature, timestamp, path, query)
                if enforceOnlyPublic:
                    onlyPublic = True
                # Create an access wrapper for the API key's user
                user = ak.user if ak and not onlyPublic else None
            else:  # Access Token (OAuth)
                at = load_token(oauth_request.access_token.access_token)
                user = at.user if at and not onlyPublic else None
            # Get rid of API key in cache key if we did not impersonate a user
            if ak and user is None:
                cacheKey = normalizeQuery(path, query,
                                          remove=('_', 'ak', 'apiKey', 'signature', 'timestamp', 'nc', 'nocache',
                                                  'oa', 'onlyauthed'))
            else:
                cacheKey = normalizeQuery(path, query,
                                          remove=('_', 'signature', 'timestamp', 'nc', 'nocache', 'oa', 'onlyauthed'))
            if signature:
                # in case the request was signed, store the result under a different key
                cacheKey = 'signed_' + cacheKey
        else:
            # We authenticated using a session cookie.
            token = request.headers.get('X-CSRF-Token', get_query_parameter(queryParams, ['csrftoken']))
            if used_session.csrf_protected and used_session.csrf_token != token:
                raise HTTPAPIError('Invalid CSRF token', 403)
            user = used_session.user if not onlyPublic else None
            userPrefix = 'user-{}_'.format(used_session.user.id)
            cacheKey = userPrefix + normalizeQuery(path, query,
                                                   remove=('_', 'nc', 'nocache', 'ca', 'cookieauth', 'oa',
                                                           'onlyauthed', 'csrftoken'))
        # Bail out if the user requires authentication but is not authenticated
        if onlyAuthed and not user:
            raise HTTPAPIError('Not authenticated', 403)
        addToCache = not hook.NO_CACHE
        cache = GenericCache('HTTPAPI')
        cacheKey = RE_REMOVE_EXTENSION.sub('', cacheKey)
        if not noCache:
            obj = cache.get(cacheKey)
            if obj is not None:
                result, extra, ts, complete, typeMap = obj
                addToCache = False
        if result is None:
            g.current_api_user = user
            # Perform the actual exporting
            res = hook(user)
            if isinstance(res, current_app.response_class):
                addToCache = False
                is_response = True
                result, extra, complete, typeMap = res, {}, True, {}
            elif isinstance(res, tuple) and len(res) == 4:
                result, extra, complete, typeMap = res
            else:
                result, extra, complete, typeMap = res, {}, True, {}
        if result is not None and addToCache:
            ttl = api_settings.get('cache_ttl')
            if ttl > 0:
                cache.set(cacheKey, (result, extra, ts, complete, typeMap), ttl)
    except HTTPAPIError as e:
        error = e
        if e.getCode():
            status_code = e.getCode()
    if result is None and error is None:
        # TODO: usage page
        raise NotFound
    else:
        if ak and error is None:
            # Commit only if there was an API key and no error
            norm_path, norm_query = normalizeQuery(path, query, remove=('signature', 'timestamp'), separate=True)
            uri = to_unicode('?'.join(filter(None, (norm_path, norm_query))))
            ak.register_used(request.remote_addr, uri, not onlyPublic)
            db.session.commit()
        else:
            # No need to commit stuff if we didn't use an API key (nothing was written)
            # XXX do we even need this?
            db.session.rollback()
    # Log successful POST api requests
    if error is None and request.method == 'POST':
        logger.info('API request: %s?%s', path, query)
    if is_response:
        return result
    serializer = Serializer.create(dformat, query_params=queryParams, pretty=pretty, typeMap=typeMap,
                                   **hook.serializer_args)
    if error:
        if not serializer.schemaless:
            # if our serializer has a specific schema (HTML, ICAL, etc...)
            # use JSON, since it is universal
            serializer = Serializer.create('json')
        result = fossilize(error)
    else:
        if serializer.encapsulate:
            result = fossilize(HTTPAPIResult(result, path, query, ts, complete, extra), IHTTPAPIExportResultFossil)
            del result['_fossil']
    try:
        data = serializer(result)
        response = current_app.make_response(data)
        content_type = serializer.get_response_content_type()
        if content_type:
            response.content_type = content_type
        if status_code:
            response.status_code = status_code
        return response
    except Exception:
        logger.exception('Serialization error in request %s?%s', path, query)
        raise
def apply_db_loggers(app):
    """Attach SQLAlchemy and teardown listeners that log queries per request.

    No-op unless ``app.debug`` is set; the ``_loggers_applied`` flag on
    ``db`` makes the call idempotent so listeners are registered once.
    """
    if not app.debug or getattr(db, '_loggers_applied', False):
        return
    db._loggers_applied = True
    from indico.core.logger import Logger
    logger = Logger.get('db')
    logger.setLevel(logging.DEBUG)

    @listens_for(Engine, 'before_cursor_execute')
    def before_cursor_execute(conn, cursor, statement, parameters, context, executemany):
        # Emit a single 'request started' record for the first query of a request.
        if not g.get('req_start_sent'):
            g.req_start_sent = True
            logger.debug('Request started',
                         extra={'sql_log_type': 'start_request',
                                'repl': app.config.get('REPL'),
                                'req_verb': request.method if has_request_context() else None,
                                'req_path': request.path if has_request_context() else None,
                                'req_url': request.url if has_request_context() else None})
        # Remember when the query started so the 'after' hook can time it.
        context._query_start_time = time.time()
        source_line = _get_sql_line()
        if source_line:
            log_msg = 'Start Query:\n {0[file]}:{0[line]} {0[function]}\n\n{1}\n{2}'.format(
                source_line,
                _prettify_sql(statement),
                _prettify_params(parameters) if parameters else '').rstrip()
        else:
            # UPDATEs can't be traced back to their source since they are executed only on flush
            log_msg = 'Start Query:\n{0}\n{1}'.format(
                _prettify_sql(statement),
                _prettify_params(parameters) if parameters else '').rstrip()
        # psycopg2._psycopg.Binary objects are extremely weird and don't work in isinstance checks
        if hasattr(parameters, 'iteritems'):
            parameters = {k: _fix_param(v) for k, v in parameters.iteritems()}
        else:
            parameters = tuple(_fix_param(v) for v in parameters)
        logger.debug(log_msg,
                     extra={'sql_log_type': 'start',
                            'req_path': request.path if has_request_context() else None,
                            'sql_source': source_line['items'] if source_line else None,
                            'sql_statement': statement,
                            'sql_verb': statement.split()[0],
                            'sql_params': parameters})

    @listens_for(Engine, 'after_cursor_execute')
    def after_cursor_execute(conn, cursor, statement, parameters, context, executemany):
        # Duration since the matching 'before' hook stored the start time.
        total = time.time() - context._query_start_time
        source_line = _get_sql_line()
        source = source_line['items'] if source_line else None
        logger.debug('Query complete; total time: %s', total,
                     extra={'sql_log_type': 'end',
                            'req_path': (request.path if has_request_context() else None),
                            'sql_source': source,
                            'sql_duration': total,
                            'sql_verb': statement.split()[0]})

    @appcontext_tearing_down.connect_via(app)
    @request_tearing_down.connect_via(app)
    def on_tearing_down(sender, **kwargs):
        # Emit a single 'request finished' record (guarded since both
        # teardown signals may fire for the same request).
        if g.get('req_end_sent'):
            return
        g.req_end_sent = True
        stats = get_request_stats()
        if not stats['query_count']:
            return
        logger.debug('Request finished',
                     extra={'sql_log_type': 'end_request',
                            'sql_query_count': stats['query_count'],
                            'repl': app.config.get('REPL'),
                            'req_verb': request.method if has_request_context() else None,
                            'req_url': request.url if has_request_context() else None,
                            'req_path': request.path if has_request_context() else None,
                            'req_duration': stats['req_duration'],
                            'req_query_duration': stats['query_duration']})
from flask import flash, session from indico.core import signals from indico.core.db import db from indico.core.logger import Logger from indico.core.permissions import ManagementPermission from indico.modules.events.editing.clone import EditingSettingsCloner from indico.modules.events.features.base import EventFeature from indico.modules.events.models.events import Event, EventType from indico.util.i18n import _ from indico.web.flask.util import url_for from indico.web.menu import SideMenuItem logger = Logger.get('events.editing') class EditingFeature(EventFeature): name = 'editing' friendly_name = _('Editing') description = _('Gives event managers the opportunity to let contributors submit papers and/or slides to be edited ' 'and eventually published.') @classmethod def is_allowed_for_event(cls, event): return event.type_ == EventType.conference @classmethod def enabled(cls, event, cloning): from indico.modules.events.editing.models.file_types import EditingFileType
def logger(cls):
    """Return this plugin's logger, namespaced as ``plugin.<name>``."""
    return Logger.get('plugin.{}'.format(cls.name))
from indico.core import signals from indico.core.db.sqlalchemy.principals import PrincipalType from indico.core.logger import Logger from indico.core.permissions import ManagementPermission, check_permissions, get_available_permissions from indico.modules.events.cloning import get_event_cloners from indico.modules.events.logs import EventLogKind, EventLogRealm from indico.modules.events.models.events import Event from indico.modules.events.models.legacy_mapping import LegacyEventMapping from indico.util.i18n import _, ngettext, orig_string from indico.util.string import is_legacy_id from indico.web.flask.util import url_for from indico.web.menu import SideMenuItem, TopMenuItem, TopMenuSection __all__ = ('Event', 'logger', 'event_management_object_url_prefixes', 'event_object_url_prefixes') logger = Logger.get('events') #: URL prefixes for the various event objects (public area) #: All prefixes are expected to be used inside the '/event/<int:event_id>' #: url space. event_object_url_prefixes = { 'event': [''], 'session': ['/sessions/<int:session_id>'], 'contribution': ['/contributions/<int:contrib_id>'], 'subcontribution': ['/contributions/<int:contrib_id>/subcontributions/<int:subcontrib_id>'] } #: URL prefixes for the various event objects (management area) #: All prefixes are expected to be used inside the '/event/<int:event_id>' #: url space. event_management_object_url_prefixes = {
from werkzeug.wrappers import Response from indico.core import signals from indico.core.config import config from indico.core.db import db from indico.core.db.sqlalchemy.core import handle_sqlalchemy_database_error from indico.core.logger import Logger, sentry_set_tags from indico.core.notifications import flush_email_queue, init_email_queue from indico.legacy.common import fossilize from indico.util.i18n import _ from indico.util.locators import get_locator from indico.util.signals import values_from_signal from indico.web.flask.util import ResponseUtil, url_for HTTP_VERBS = {'GET', 'PATCH', 'POST', 'PUT', 'DELETE'} logger = Logger.get('rh') class RH(object): CSRF_ENABLED = True # require a csrf_token when accessing the RH with anything but GET EVENT_FEATURE = None # require a certain event feature when accessing the RH. See `EventFeature` for details DENY_FRAMES = False # whether to send an X-Frame-Options:DENY header #: A dict specifying how the url should be normalized. #: `args` is a dictionary mapping view args keys to callables #: used to retrieve the expected value for those arguments if they #: are present in the request's view args. #: `locators` is a set of callables returning objects with locators. #: `preserved_args` is a set of view arg names which will always #: be copied from the current request if present. #: The callables are always invoked with a single `self` argument
# # Indico is free software; you can redistribute it and/or # modify it under the terms of the MIT License; see the # LICENSE file for more details. from __future__ import unicode_literals from flask import session from indico.core import signals from indico.core.logger import Logger from indico.util.i18n import _ from indico.web.flask.util import url_for from indico.web.menu import SideMenuItem logger = Logger.get('events.persons') @signals.menu.items.connect_via('event-management-sidemenu') def _sidemenu_items(sender, event, **kwargs): if event.can_manage(session.user): return SideMenuItem('persons', _('Participant Roles'), url_for('persons.person_list', event), section='organization') @signals.get_placeholders.connect_via('event-persons-email') def _get_placeholders(sender, person, event, register_link=False, **kwargs): from indico.modules.events.persons.placeholders import ( FirstNamePlaceholder, LastNamePlaceholder, EmailPlaceholder,
def _addMaterialType(self, text, user):
    """Create file/link resources for the upload and attach them to a material.

    :param text: suffix appended to the action-log subject (e.g. the owner
                 description built by the caller).
    :param user: the uploading user (currently unused in this body).
    :return: ``(material, status, info)`` where *info* is the fossilized
             list of created resources on success, or an error string.
    """
    from MaKaC.common.fossilize import fossilize
    from MaKaC.fossils.conference import ILocalFileExtendedFossil, ILinkFossil
    Logger.get('requestHandler').debug('Adding %s - request %s' % (self._uploadType, request))
    mat, newlyCreated = self._getMaterial()
    # if the material still doesn't exist, create it
    if newlyCreated:
        protectedAtResourceLevel = False
    else:
        protectedAtResourceLevel = True
    resources = []
    if self._uploadType in ['file', 'link']:
        if self._uploadType == "file":
            # One LocalFile resource per uploaded file entry.
            for fileEntry in self._files:
                resource = LocalFile()
                resource.setFileName(fileEntry["fileName"])
                resource.setFilePath(fileEntry["filePath"])
                resource.setDescription(self._description)
                if self._displayName == "":
                    resource.setName(resource.getFileName())
                else:
                    resource.setName(self._displayName)
                if not type(self._target) is Category:
                    log_info = {"subject": "Added file %s%s" % (fileEntry["fileName"], text)}
                    self._target.getConference().getLogHandler().logAction(log_info, log.ModuleNames.MATERIAL)
                resources.append(resource)
            # in case of db conflict we do not want to send the file to conversion again, nor re-store the file
        elif self._uploadType == "link":
            # One Link resource per submitted URL.
            for link in self._links:
                resource = Link()
                resource.setURL(link["url"])
                resource.setDescription(self._description)
                if self._displayName == "":
                    resource.setName(resource.getURL())
                else:
                    resource.setName(self._displayName)
                if not type(self._target) is Category:
                    log_info = {"subject": "Added link %s%s" % (resource.getURL(), text)}
                    self._target.getConference().getLogHandler().logAction(log_info, log.ModuleNames.MATERIAL)
                resources.append(resource)
        status = "OK"
        info = resources
    else:
        status = "ERROR"
        info = "Unknown upload type"
        return mat, status, info
    # forcedFileId - in case there is a conflict, use the file that is
    # already stored
    repoIDs = []
    for i, resource in enumerate(resources):
        if self._repositoryIds:
            mat.addResource(resource, forcedFileId=self._repositoryIds[i])
        else:
            mat.addResource(resource, forcedFileId=None)
        #apply conversion
        if self._topdf and not isinstance(resource, Link):
            file_ext = os.path.splitext(resource.getFileName())[1].strip().lower()
            if fileConverter.CDSConvFileConverter.hasAvailableConversionsFor(file_ext):
                # Logger.get('conv').debug('Queueing %s for conversion' % resource.getFilePath())
                fileConverter.CDSConvFileConverter.convert(resource.getFilePath(), 'pdf', mat)
                resource.setPDFConversionRequestDate(nowutc())
        # store the repo id, for files
        if isinstance(resource, LocalFile) and self._repositoryIds is None:
            repoIDs.append(resource.getRepositoryId())
        # Protection is applied per-resource for pre-existing materials,
        # per-material for newly created ones.
        if protectedAtResourceLevel:
            protectedObject = resource
        else:
            protectedObject = mat
            mat.setHidden(self._visibility)
            mat.setAccessKey(self._password)
        protectedObject.setProtection(self._statusSelection)
        for userElement in self._userList:
            if 'isGroup' in userElement and userElement['isGroup']:
                avatar = GroupHolder().getById(userElement['id'])
            else:
                avatar = AvatarHolder().getById(userElement['id'])
            protectedObject.grantAccess(avatar)
    self._topdf = False
    if self._repositoryIds is None:
        self._repositoryIds = repoIDs
    return mat, status, fossilize(info, {"MaKaC.conference.Link": ILinkFossil,
                                         "MaKaC.conference.LocalFile": ILocalFileExtendedFossil})
# along with Indico; if not, see <http://www.gnu.org/licenses/>. from __future__ import unicode_literals from flask import session from indico.core import signals from indico.core.logger import Logger from indico.core.settings import SettingsProxy from indico.util.i18n import _ from indico.web.flask.util import url_for from indico.web.menu import SideMenuItem __all__ = ('logger', 'cephalopod_settings') logger = Logger.get('cephalopod') cephalopod_settings = SettingsProxy( 'cephalopod', { 'show_migration_message': False, 'joined': False, 'contact_email': None, 'contact_name': None, 'uuid': None }) @signals.menu.items.connect_via('admin-sidemenu') def _extend_admin_menu(sender, **kwargs): if session.user.is_admin: return SideMenuItem('cephalopod',
def canonicalize_url():
    """Reject requests whose URL root differs from the configured base URL.

    Returns a 404 error page on mismatch; returns ``None`` (letting the
    request proceed) when the URL root matches.
    """
    if request.url_root.rstrip('/') == config.BASE_URL:
        return
    Logger.get('flask').info('Received request with invalid url root for %s', request.url)
    return render_template('bad_url_error.html'), 404
# # You should have received a copy of the GNU General Public License # along with Indico; if not, see <http://www.gnu.org/licenses/>. from __future__ import unicode_literals from flask import flash, session, request from indico.core import signals from indico.core.logger import Logger from indico.modules.events.settings import EventSettingsProxy from indico.util.i18n import ngettext from indico.web.flask.util import url_for from indico.web.menu import SideMenuItem logger = Logger.get('events.features') features_event_settings = EventSettingsProxy('features', {'enabled': None}) @signals.menu.items.connect_via('event-management-sidemenu') def _extend_event_management_menu(sender, event, **kwargs): if not event.can_manage(session.user): return return SideMenuItem('features', 'Features', url_for('event_features.index', event), section='advanced') @signals.app_created.connect def _check_feature_definitions(app, **kwargs):
def handle_exception(exception):
    """Log an unhandled WSGI exception and render a generic error page.

    In debug mode the exception is re-raised instead, so the interactive
    debugger can take over.
    """
    log_message = exception.message or 'WSGI Exception'
    Logger.get('wsgi').exception(log_message)
    if current_app.debug:
        raise
    error_page = render_error(_("An unexpected error occurred."), str(exception), standalone=True)
    return error_page, 500
from datetime import date import click from celery.exceptions import MaxRetriesExceededError, Retry from sqlalchemy.orm.attributes import flag_modified from indico.core.celery import celery from indico.core.config import config from indico.core.db import db from indico.core.logger import Logger from indico.util.date_time import now_utc from indico.util.emails.backend import EmailBackend from indico.util.emails.message import EmailMessage from indico.util.string import truncate logger = Logger.get('emails') MAX_TRIES = 10 DELAYS = [30, 60, 120, 300, 600, 1800, 3600, 3600, 7200] @celery.task(name='send_email', bind=True, max_retries=None) def send_email_task(task, email, log_entry=None): attempt = task.request.retries + 1 try: do_send_email(email, log_entry, _from_task=True) except Exception as exc: delay = (DELAYS + [0])[task.request.retries] if not config.DEBUG else 1 try: task.retry(countdown=delay, max_retries=(MAX_TRIES - 1)) except MaxRetriesExceededError: if log_entry:
def _process(self): # We will need to pickle the data back into JSON user = self.getAW().getUser() if not self._loggedIn: return json.dumps( { 'status': 'ERROR', 'info': { 'type': 'noReport', 'title': '', 'explanation': _('You are currently not authenticated. Please log in again.' ) } }, textarea=True) try: owner = self._target title = owner.getTitle() if type(owner) == Conference: ownerType = "event" elif type(owner) == Session: ownerType = "session" elif type(owner) == Contribution: ownerType = "contribution" elif type(owner) == SubContribution: ownerType = "subcontribution" else: ownerType = "" text = " in %s %s" % (ownerType, title) except: owner = None text = "" try: if len(self._errorList) > 0: raise Exception('Operation aborted') else: mat, status, info = self._addMaterialType(text, user) if status == "OK": for entry in info: entry['material'] = mat.getId() except Exception, e: status = "ERROR" if 'file' in self._params: del self._params['file'] info = { 'message': self._errorList or " %s: %s" % (e.__class__.__name__, str(e)), 'code': '0', 'requestInfo': self._params } Logger.get('requestHandler').exception('Error uploading file')
from indico.core.logger import Logger from indico.core.notifications import make_email, send_email from indico.core.settings import SettingsProxy from indico.core.settings.converters import EnumConverter from indico.modules.users.ext import ExtraUserPreferences from indico.modules.users.models.settings import UserSetting, UserSettingsProxy from indico.modules.users.models.users import NameFormat, User from indico.util.i18n import _ from indico.web.flask.templating import get_template_module from indico.web.flask.util import url_for from indico.web.menu import SideMenuItem, TopMenuItem __all__ = ('ExtraUserPreferences', 'User', 'UserSetting', 'UserSettingsProxy', 'user_settings') logger = Logger.get('users') user_settings = UserSettingsProxy( 'users', { 'lang': None, 'timezone': None, 'force_timezone': False, # always use the user's timezone instead of an event's timezone 'show_future_events': False, 'show_past_events': False, 'name_format': NameFormat.first_last, 'use_previewer_pdf': True, 'synced_fields': None, # None to synchronize all fields, empty set to not synchronize 'suggest_categories':
# LICENSE file for more details. from __future__ import unicode_literals from flask import render_template, session from indico.core import signals from indico.core.logger import Logger from indico.modules.events.timetable.models.entries import TimetableEntry, TimetableEntryType from indico.util.date_time import now_utc from indico.util.i18n import _ from indico.web.flask.templating import template_hook from indico.web.flask.util import url_for from indico.web.menu import SideMenuItem logger = Logger.get('events.timetable') @signals.event.sidemenu.connect def _extend_event_menu(sender, **kwargs): from indico.modules.events.layout.util import MenuEntryData yield MenuEntryData(title=_("Timetable"), name='timetable', endpoint='timetable.timetable', position=3, static_site=True) @signals.menu.items.connect_via('event-management-sidemenu') def _extend_event_management_menu(sender, event, **kwargs): from indico.modules.events.sessions.util import can_manage_sessions
def logger(cls):
    """Return this plugin's logger, namespaced as ``plugin.<name>``."""
    return Logger.get('plugin.%s' % cls.name)
# This file is part of Indico. # Copyright (C) 2002 - 2020 CERN # # Indico is free software; you can redistribute it and/or # modify it under the terms of the MIT License; see the # LICENSE file for more details. from __future__ import unicode_literals from flask import current_app, request from flask_multipass import InvalidCredentials, Multipass, NoSuchUser from indico.core.logger import Logger logger = Logger.get('auth') class IndicoMultipass(Multipass): @property def default_local_auth_provider(self): """The default form-based auth provider.""" return next((p for p in self.auth_providers.itervalues() if not p.is_external and p.settings.get('default')), None) @property def sync_provider(self): """The synchronization provider. This is the identity provider used to sync user data. """
from flask import render_template, session from indico.core import signals from indico.core.logger import Logger from indico.core.permissions import ManagementPermission from indico.modules.events import Event from indico.modules.events.features.base import EventFeature from indico.modules.events.layout.util import MenuEntryData from indico.modules.events.surveys.util import query_active_surveys from indico.util.i18n import _ from indico.web.flask.templating import template_hook from indico.web.flask.util import url_for from indico.web.menu import SideMenuItem logger = Logger.get('events.survey') @signals.users.merged.connect def _merge_users(target, source, **kwargs): from indico.modules.events.surveys.models.submissions import SurveySubmission SurveySubmission.find(user_id=source.id).update( {SurveySubmission.user_id: target.id}) @signals.menu.items.connect_via('event-management-sidemenu') def _extend_event_management_menu(sender, event, **kwargs): if not event.has_feature('surveys') or not event.can_manage( session.user, 'surveys'): return return SideMenuItem('surveys',
from __future__ import unicode_literals from flask import session from indico.core import signals from indico.core.logger import Logger from indico.core.roles import ManagementRole from indico.modules.events import Event from indico.modules.events.models.events import EventType from indico.modules.events.tracks.clone import TrackCloner from indico.modules.events.tracks.models.tracks import Track from indico.util.i18n import _ from indico.web.flask.util import url_for from indico.web.menu import SideMenuItem logger = Logger.get('tracks') @signals.menu.items.connect_via('event-management-sidemenu') def _sidemenu_items(sender, event, **kwargs): if event.type_ == EventType.conference and event.can_manage(session.user): return SideMenuItem('program', _('Programme'), url_for('tracks.manage', event), section='organization') @signals.event.sidemenu.connect def _extend_event_menu(sender, **kwargs): from indico.modules.events.layout.util import MenuEntryData from indico.modules.events.tracks.settings import track_settings