def send_email_task(task, email, log_entry=None):
    """Send an email from within a celery task, retrying on failure.

    :param task: The bound celery task (provides ``request.retries``
                 and ``retry()``).
    :param email: The email data dict as created by ``make_email``;
                  must contain at least ``subject``.
    :param log_entry: Optional log entry whose state is updated once
                      the email has been sent or permanently failed.
    """
    # `retries` is 0 on the first run, so the human-readable attempt is +1
    attempt = task.request.retries + 1
    try:
        do_send_email(email, log_entry, _from_task=True)
    except Exception as exc:
        # pick the configured backoff for this retry; the appended 0 covers
        # the final attempt.  in debug mode retry almost immediately.
        delay = (DELAYS + [0])[task.request.retries] if not config.DEBUG else 1
        try:
            task.retry(countdown=delay, max_retries=(MAX_TRIES - 1))
        except MaxRetriesExceededError:
            if log_entry:
                update_email_log_state(log_entry, failed=True)
                db.session.commit()
            # store the email in case the mail server is unavailable for an
            # extended period so someone can recover it using `indico shell`
            # and possibly retry sending it
            path = store_failed_email(email, log_entry)
            logger.error('Could not send email "%s" (attempt %d/%d); giving up [%s]; stored data in %s',
                         truncate(email['subject'], 100), attempt, MAX_TRIES, exc, path)
        except Retry:
            # `task.retry()` signals celery by raising `Retry`; log and re-raise
            logger.warning('Could not send email "%s" (attempt %d/%d); retry in %ds [%s]',
                           truncate(email['subject'], 100), attempt, MAX_TRIES, delay, exc)
            raise
    else:
        if task.request.retries:
            logger.info('Sent email "%s" (attempt %d/%d)', truncate(email['subject'], 100), attempt, MAX_TRIES)
        else:
            logger.info('Sent email "%s"', truncate(email['subject'], 100))
        # commit the log entry state change
        if log_entry:
            db.session.commit()
def send_email_task(task, email, log_entry=None):
    """Send an email from within a celery task, retrying on failure.

    :param task: The bound celery task (provides ``request.retries``
                 and ``retry()``).
    :param email: The email data dict as created by ``make_email``;
                  must contain at least ``subject``.
    :param log_entry: Optional log entry whose state is updated once
                      the email has been sent or permanently failed.
    """
    # `retries` is 0 on the first run, so the human-readable attempt is +1
    attempt = task.request.retries + 1
    try:
        do_send_email(email, log_entry, _from_task=True)
    except Exception as exc:
        # pick the configured backoff for this retry; the appended 0 covers
        # the final attempt.  in debug mode retry almost immediately.
        delay = (DELAYS + [0])[task.request.retries] if not config.DEBUG else 1
        try:
            task.retry(countdown=delay, max_retries=(MAX_TRIES - 1))
        except MaxRetriesExceededError:
            if log_entry:
                update_email_log_state(log_entry, failed=True)
                db.session.commit()
            # store the email in case the mail server is unavailable for an
            # extended period so someone can recover it using `indico shell`
            # and possibly retry sending it
            path = store_failed_email(email, log_entry)
            logger.error(
                'Could not send email "%s" (attempt %d/%d); giving up [%s]; stored data in %s',
                truncate(email['subject'], 100), attempt, MAX_TRIES, exc, path)
        except Retry:
            # `task.retry()` signals celery by raising `Retry`; log and re-raise
            logger.warning(
                'Could not send email "%s" (attempt %d/%d); retry in %ds [%s]',
                truncate(email['subject'], 100), attempt, MAX_TRIES, delay, exc)
            raise
    else:
        if task.request.retries:
            logger.info('Sent email "%s" (attempt %d/%d)', truncate(email['subject'], 100), attempt, MAX_TRIES)
        else:
            logger.info('Sent email "%s"', truncate(email['subject'], 100))
        # commit the log entry state change
        if log_entry:
            db.session.commit()
def render_event_footer(event, dark=False):
    """Render the standard footer template for an event page.

    Builds the Google Calendar quick-add parameters and decides whether
    social badges should be shown, then renders ``events/footer.html``.
    """
    dt_fmt = '%Y%m%dT%H%M%SZ'
    place = event.venue_name
    if event.room_name:
        place = '{} ({})'.format(event.room_name, place)
    details = '{}\n\n{}'.format(truncate(event.description, 1000), event.short_external_url).strip()
    gcal_params = {
        'action': 'TEMPLATE',
        'text': event.title,
        'dates': '{}/{}'.format(event.start_dt.strftime(dt_fmt), event.end_dt.strftime(dt_fmt)),
        'details': details,
        'location': place,
        'trp': False,
        'sprop': [event.external_url, 'name:indico'],
    }
    social_data = social_settings.get_all()
    show_badges = social_data['enabled'] and layout_settings.get(event, 'show_social_badges')
    return render_template('events/footer.html', event=event, dark=dark,
                           social_settings=social_data, show_social=show_badges,
                           google_calendar_params=gcal_params)
def do_send_email(email, log_entry=None, _from_task=False):
    """Send an email.

    This function should not be called directly unless your goal is
    to send an email *right now* without nice error handling or
    retrying.  For pretty much all cases where you just want to send
    an email, use `send_email` instead.

    :param email: The data describing the email, as created by
                  `make_email`
    :param log_entry: An `EventLogEntry` for the email in case it was
                      sent in the context of an event.  After sending
                      the email, the log entry's state will be updated
                      to indicate that the email has been sent.
    :param _from_task: Indicates that this function is called from the
                       celery task responsible for sending emails.
    """
    with EmailBackend(timeout=config.SMTP_TIMEOUT) as conn:
        msg = EmailMessage(subject=email['subject'], body=email['body'], from_email=email['from'],
                           to=email['to'], cc=email['cc'], bcc=email['bcc'], reply_to=email['reply_to'],
                           attachments=email['attachments'], connection=conn)
        if not msg.to:
            # all recipients are bcc'd; use the conventional placeholder header
            msg.extra_headers['To'] = 'Undisclosed-recipients:;'
        if email['html']:
            msg.content_subtype = 'html'
        msg.send()
    if not _from_task:
        # the celery task does its own (attempt-aware) logging
        logger.info('Sent email "%s"', truncate(email['subject'], 100))
    if log_entry:
        update_email_log_state(log_entry)
def flush_email_queue():
    """Send all the emails in the queue.

    Note: This function does a database commit to update states
    in case of failures or immediately-sent emails. It should only
    be called if the session is in a state safe to commit or after
    doing a commit/rollback of any other changes that might have
    been pending.
    """
    # imported here to avoid a circular import at module load time
    from indico.core.emails import store_failed_email, update_email_log_state
    queue = g.get('email_queue', [])
    if not queue:
        return
    logger.debug('Sending %d queued emails', len(queue))
    for fn, email, log_entry in queue:
        try:
            fn(email, log_entry)
        except Exception:
            # Flushing the email queue happens after a commit.
            # If anything goes wrong here we keep going and just log
            # it to avoid losing (more) emails in case celery is not
            # used for email sending or there is a temporary issue
            # with celery.
            if log_entry:
                update_email_log_state(log_entry, failed=True)
            # keep the payload on disk so the email can be recovered later
            path = store_failed_email(email, log_entry)
            logger.exception(
                'Flushing queued email "%s" failed; stored data in %s',
                truncate(email['subject'], 100), path)
            # Wait for a short moment in case it's a very temporary issue
            time.sleep(0.25)
    # clear the queue in place so the object stored on `g` stays empty
    del queue[:]
    db.session.commit()
def flush_email_queue():
    """Send every email currently sitting in the request-local queue.

    Performs a database commit at the end to persist log-entry state
    changes, so only call it when the session is safe to commit (or
    right after an explicit commit/rollback of pending changes).
    """
    # local import avoids a circular dependency at module import time
    from indico.core.emails import store_failed_email, update_email_log_state
    queue = g.get('email_queue', [])
    if not queue:
        return
    logger.debug('Sending %d queued emails', len(queue))
    for send_fn, email, log_entry in queue:
        try:
            send_fn(email, log_entry)
        except Exception:
            # Flushing happens after a commit, so a failure here must not
            # abort the loop: keep sending the remaining emails and merely
            # record the failure, to avoid losing (more) emails when celery
            # is not used for sending or is temporarily unavailable.
            if log_entry:
                update_email_log_state(log_entry, failed=True)
            path = store_failed_email(email, log_entry)
            logger.exception('Flushing queued email "%s" failed; stored data in %s',
                             truncate(email['subject'], 100), path)
            # brief pause in case the problem is very transient
            time.sleep(0.25)
    # empty the list in place so the object referenced by `g` is cleared too
    del queue[:]
    db.session.commit()
def do_send_email(email, log_entry=None, _from_task=False):
    """Send an email.

    This function should not be called directly unless your goal is
    to send an email *right now* without nice error handling or
    retrying.  For pretty much all cases where you just want to send
    an email, use `send_email` instead.

    :param email: The data describing the email, as created by
                  `make_email`
    :param log_entry: An `EventLogEntry` for the email in case it was
                      sent in the context of an event.  After sending
                      the email, the log entry's state will be updated
                      to indicate that the email has been sent.
    :param _from_task: Indicates that this function is called from the
                       celery task responsible for sending emails.
    """
    with get_connection() as conn:
        msg = EmailMessage(subject=email['subject'], body=email['body'], from_email=email['from'],
                           to=email['to'], cc=email['cc'], bcc=email['bcc'], reply_to=email['reply_to'],
                           attachments=email['attachments'], connection=conn)
        if not msg.to:
            # all recipients are bcc'd; use the conventional placeholder header
            msg.extra_headers['To'] = 'Undisclosed-recipients:;'
        if email['html']:
            msg.content_subtype = 'html'
        # generate our own Message-ID using the instance's hostname so the
        # sending MTA does not have to make one up
        msg.extra_headers['message-id'] = make_msgid(domain=url_parse(config.BASE_URL).host)
        msg.send()
    if not _from_task:
        # the celery task does its own (attempt-aware) logging
        logger.info('Sent email "%s"', truncate(email['subject'], 100))
    if log_entry:
        update_email_log_state(log_entry)
def set(self, key, val, ttl=0):
    """Store *val* (pickled) under *key* in redis.

    :param key: the cache key
    :param val: any picklable value
    :param ttl: expiry in seconds; 0 means no expiry

    Redis errors are swallowed and logged so cache failures never
    break the caller.
    """
    payload = pickle.dumps(val)
    try:
        if ttl:
            self._client.setex(key, ttl, payload)
        else:
            self._client.set(key, payload)
    except redis.RedisError:
        Logger.get('cache.redis').exception('set(%r, %s, %r) failed',
                                            key, truncate(repr(val), 1000), ttl)
def _getTruncatedParams(self):
    """Return a logging-safe copy of the request params.

    Password values are replaced by a placeholder (keeping only their
    length) and long string values are truncated to 1024 characters;
    everything else is passed through unchanged.
    """
    safe_params = {}
    for name, value in self._reqParams.iteritems():
        if name in {'password', 'confirm_password'}:
            safe_params[name] = '[password hidden, len=%d]' % len(value)
        elif isinstance(value, basestring):
            safe_params[name] = truncate(value, 1024)
        else:
            safe_params[name] = value
    return safe_params
def render_event_footer(event, dark=False):
    """Render the standard footer template for an event page.

    :param event: the event whose footer is rendered
    :param dark: whether to use the dark footer style
    :return: the rendered ``events/footer.html`` template
    """
    location = event.venue_name
    if location is also set from the room below  # noqa -- see next lines
    location = event.venue_name
    if event.room_name:
        location = '{} ({})'.format(event.room_name, location)
    # description feeds the Google Calendar "details" field; cap it at 1000
    # chars and append the event link
    description = '{}\n\n{}'.format(truncate(event.description, 1000), event.short_external_url).strip()
    google_calendar_params = {
        'action': 'TEMPLATE',
        'text': event.title,
        'dates': '{}/{}'.format(event.start_dt.strftime('%Y%m%dT%H%M%SZ'), event.end_dt.strftime('%Y%m%dT%H%M%SZ')),
        'details': description,
        'location': location,
        'trp': False,
        'sprop': [event.external_url, 'name:indico']
    }
    social_settings_data = social_settings.get_all()
    # badges are shown only if globally enabled AND enabled for this event
    show_social = social_settings_data['enabled'] and layout_settings.get(event, 'show_social_badges')
    return render_template('events/footer.html', event=event, dark=dark, social_settings=social_settings_data,
                           show_social=show_social, google_calendar_params=google_calendar_params)
def resend_failed_emails_cmd(paths):
    """Resend emails that were previously stored on disk after failing.

    :param paths: iterable of paths to stored failed-email files
    """
    for path in paths:
        email = resend_failed_email(path)
        message = 'Email sent: "{}" ({})'.format(truncate(email['subject'], 100),
                                                 os.path.basename(path))
        click.secho(message, fg='green')
def getTalks(conference, sort=False):
    """Build the list of recordable events (conference, contributions,
    subcontributions and sessions) for the Recording Manager UI.

    sort: if True, contributions are sorted by start date (non scheduled contributions at the end)

    Each entry is a dict with display fields (title, speakers, dates),
    micala/CDS linkage info (LOID, CDSID, CDSURL) and a bg color.
    """
    # Logger.get('RecMan').debug("in getTalks()")
    # max length for title string
    title_length = 39
    # recordable_events is my own list of tags for each recordable event
    # which will be used by the tpl file.
    recordable_events = []
    talks = []
    speaker_str = ""
    speaker_list = conference.getChairList()
    if speaker_list is not None:
        speaker_str = ", ".join(["%s %s" % (speaker.getFirstName(), speaker.getFamilyName())
                                 for speaker in speaker_list])
    # first entry: the conference itself
    event_info = {}
    event_info["contId"] = ""
    event_info["speakers"] = speaker_str
    event_info["type"] = "conference"
    event_info["IndicoID"] = generateIndicoID(conference=conference.getId(),
                                              session=None,
                                              contribution=None,
                                              subcontribution=None)
    event_info["title"] = conference.getTitle()
    # NOTE(review): 40 here vs title_length (39) everywhere else -- looks
    # inconsistent; confirm whether it is intentional before changing it.
    event_info["titleshort"] = truncate(event_info["title"], 40)
    # this always comes first, so just pretend it's 0 seconds past the epoch
    event_info["date"] = int(
        time.mktime(conference.getAdjustedStartDate().timetuple()))
    event_info["LOID"] = ""
    event_info["IndicoLink"] = doesExistIndicoLink(conference)
    recordable_events.append(event_info)
    # Posters are contributions that are not recordable,
    # so filter them out here
    filter = PosterFilterField(conference, False, False)
    for contribution in conference.getContributionList():
        if filter.satisfies(contribution):
            talks.append(contribution)
            speaker_str = ""
            speaker_list = contribution.getSpeakerList()
            if speaker_list is not None:
                speaker_str = ", ".join(["%s %s" % (speaker.getFirstName(), speaker.getFamilyName())
                                         for speaker in speaker_list])
            event_info = {}
            event_info["contId"] = contribution.getId()
            event_info["speakers"] = speaker_str
            event_info["type"] = "contribution"
            event_info["IndicoID"] = generateIndicoID(
                conference=conference.getId(),
                session=None,
                contribution=contribution.getId(),
                subcontribution=None)
            event_info["title"] = contribution.getTitle()
            event_info["titleshort"] = truncate(event_info["title"], title_length)
            # Sometimes contributions are not scheduled, so they have no start date.
            # In this case assign it the value None, and it will be displayed
            # at the end of the list with the time value "not scheduled"
            if contribution.getAdjustedStartDate() is not None:
                event_info["date"] = int(
                    time.mktime(contribution.getAdjustedStartDate().timetuple()))
            else:
                event_info["date"] = None
            event_info["LOID"] = ""
            event_info["IndicoLink"] = doesExistIndicoLink(contribution)
            recordable_events.append(event_info)
            ctr_sc = 0
            for subcontribution in contribution.getSubContributionList():
                ctr_sc += 1
                event_info = {}
                event_info["contId"] = contribution.getId()
                speaker_str = ""
                speaker_list = subcontribution.getSpeakerList()
                if speaker_list is not None:
                    speaker_str = ", ".join(["%s %s" % (speaker.getFirstName(), speaker.getFamilyName())
                                             for speaker in speaker_list])
                event_info["speakers"] = speaker_str
                event_info["type"] = "subcontribution"
                event_info["IndicoID"] = generateIndicoID(
                    conference=conference.getId(),
                    session=None,
                    contribution=contribution.getId(),
                    subcontribution=subcontribution.getId())
                event_info["title"] = subcontribution.getTitle()
                event_info["titleshort"] = truncate(event_info["title"], title_length)
                # Subcontribution objects don't have start dates,
                # so get the owner contribution's start date
                # and add the counter ctr_sc so they appear in order
                if subcontribution.getOwner().getAdjustedStartDate() is not None:
                    event_info["date"] = int(
                        time.mktime(subcontribution.getOwner().getAdjustedStartDate().timetuple()) + ctr_sc)
                else:
                    event_info["date"] = int(
                        time.mktime(conference.getAdjustedStartDate().timetuple())) + ctr_sc
                event_info["LOID"] = ""
                event_info["IndicoLink"] = doesExistIndicoLink(subcontribution)
                recordable_events.append(event_info)
    # one entry per session
    for session in conference.getSessionList():
        event_info = {}
        event_info["contId"] = ""
        event_info["speakers"] = ""
        event_info["type"] = "session"
        event_info["IndicoID"] = generateIndicoID(
            conference=conference.getId(),
            session=session.getId(),
            contribution=None,
            subcontribution=None)
        event_info["title"] = session.getTitle()
        event_info["titleshort"] = truncate(event_info["title"], title_length)
        # Get start time as seconds since the epoch so we can sort
        if session.getAdjustedStartDate() is not None:
            event_info["date"] = int(
                time.mktime(session.getAdjustedStartDate().timetuple()))
        else:
            event_info["date"] = None
        event_info["LOID"] = ""
        event_info["IndicoLink"] = doesExistIndicoLink(session)
        recordable_events.append(event_info)
    # Get list of matching IndicoIDs and CDS records from CDS
    cds_indico_matches = getCDSRecords(conference.getId())
    # Logger.get('RecMan').debug('cds_indico_pending...')
    cds_indico_pending = MicalaCommunication.getCDSPending(conference.getId())
    # In case there are any records that were pending and are now appearing in CDS,
    # then update the micala database accordingly.
    MicalaCommunication.updateMicalaCDSExport(cds_indico_matches, cds_indico_pending)
    # Logger.get('RecMan').debug("cds_indico_matches: %s, cds_indico_pending: %s" % (cds_indico_matches, cds_indico_pending))
    # annotate each event with its CDS export state: a concrete record,
    # 'pending', or "none"
    for event_info in recordable_events:
        try:
            event_info["CDSID"] = cds_indico_matches[event_info["IndicoID"]]
            event_info["CDSURL"] = CollaborationTools.getOptionValue(
                "RecordingManager", "CDSBaseURL") % event_info["CDSID"]
        except KeyError:
            # Logger.get('RecMan').debug("Following talk not in CDS: %s" % event_info["title"])
            if cds_indico_pending is not None and event_info["IndicoID"] in set(cds_indico_pending):
                event_info["CDSID"] = 'pending'
                event_info["CDSURL"] = ""
            else:
                event_info["CDSID"] = "none"
                event_info["CDSURL"] = ""
    # Get list of matching IndicoID's and LOIDs from the Micala database
    existing_matches = MicalaCommunication.getMatches(conference.getId())
    # insert any existing matches into the recordable_events array
    for talk in recordable_events:
        # Look up IndicoID in existing_matches dictionary
        try:
            matching_LOID = existing_matches[talk["IndicoID"]]
        # If not found, do nothing (talk["LOID"] should already be assigned to "" by default)
        except KeyError:
            pass
        # If there is a matching_LOID, assign it to talk["LOID"]
        else:
            talk["LOID"] = matching_LOID
    # Now that we have all the micala, CDS and IndicoLink info, set up the bg images
    for talk in recordable_events:
        talk["bg"] = chooseBGColor(talk)
    # Format dates for each talk for pleasing display
    for talk in recordable_events:
        talk["date_nice"] = formatDate(talk["date"])
    # Next, sort the list of events by startDate for display purposes
    recordable_events.sort(startTimeCompare)
    # Logger.get('RecMan').debug('leaving getTalks()')
    return recordable_events
def resend_failed_emails_cmd(paths):
    """Resend emails that were previously stored on disk after failing.

    :param paths: iterable of paths to stored failed-email files
    """
    for path in paths:
        email = resend_failed_email(path)
        # report each successfully resent email on the CLI
        click.secho('Email sent: "{}" ({})'.format(truncate(email['subject'], 100), os.path.basename(path)),
                    fg='green')
def getTalks(conference, sort=False):
    """Build the list of recordable events (conference, contributions,
    subcontributions and sessions) for the Recording Manager UI.

    sort: if True, contributions are sorted by start date (non scheduled contributions at the end)

    Each entry is a dict with display fields (title, speakers, dates),
    micala/CDS linkage info (LOID, CDSID, CDSURL) and a bg color.
    """
    # Logger.get('RecMan').debug("in getTalks()")
    # max length for title string
    title_length = 39
    # recordable_events is my own list of tags for each recordable event
    # which will be used by the tpl file.
    recordable_events = []
    talks = []
    speaker_str = ""
    speaker_list = conference.getChairList()
    if speaker_list is not None:
        speaker_str = ", ".join(["%s %s" % (speaker.getFirstName(), speaker.getFamilyName())
                                 for speaker in speaker_list])
    # first entry: the conference itself
    event_info = {}
    event_info["contId"] = ""
    event_info["speakers"] = speaker_str
    event_info["type"] = "conference"
    event_info["IndicoID"] = generateIndicoID(conference=conference.getId(),
                                              session=None,
                                              contribution=None,
                                              subcontribution=None)
    event_info["title"] = conference.getTitle()
    # NOTE(review): 40 here vs title_length (39) everywhere else -- looks
    # inconsistent; confirm whether it is intentional before changing it.
    event_info["titleshort"] = truncate(event_info["title"], 40)
    # this always comes first, so just pretend it's 0 seconds past the epoch
    event_info["date"] = int(time.mktime(conference.getAdjustedStartDate().timetuple()))
    event_info["LOID"] = ""
    event_info["IndicoLink"] = doesExistIndicoLink(conference)
    recordable_events.append(event_info)
    # Posters are contributions that are not recordable,
    # so filter them out here
    filter = PosterFilterField(conference, False, False)
    for contribution in conference.getContributionList():
        if filter.satisfies(contribution):
            talks.append(contribution)
            speaker_str = ""
            speaker_list = contribution.getSpeakerList()
            if speaker_list is not None:
                speaker_str = ", ".join(["%s %s" % (speaker.getFirstName(), speaker.getFamilyName())
                                         for speaker in speaker_list])
            event_info = {}
            event_info["contId"] = contribution.getId()
            event_info["speakers"] = speaker_str
            event_info["type"] = "contribution"
            event_info["IndicoID"] = generateIndicoID(conference=conference.getId(),
                                                      session=None,
                                                      contribution=contribution.getId(),
                                                      subcontribution=None)
            event_info["title"] = contribution.getTitle()
            event_info["titleshort"] = truncate(event_info["title"], title_length)
            # Sometimes contributions are not scheduled, so they have no start date.
            # In this case assign it the value None, and it will be displayed
            # at the end of the list with the time value "not scheduled"
            if contribution.getAdjustedStartDate() is not None:
                event_info["date"] = int(time.mktime(contribution.getAdjustedStartDate().timetuple()))
            else:
                event_info["date"] = None
            event_info["LOID"] = ""
            event_info["IndicoLink"] = doesExistIndicoLink(contribution)
            recordable_events.append(event_info)
            ctr_sc = 0
            for subcontribution in contribution.getSubContributionList():
                ctr_sc += 1
                event_info = {}
                event_info["contId"] = contribution.getId()
                speaker_str = ""
                speaker_list = subcontribution.getSpeakerList()
                if speaker_list is not None:
                    speaker_str = ", ".join(["%s %s" % (speaker.getFirstName(), speaker.getFamilyName())
                                             for speaker in speaker_list])
                event_info["speakers"] = speaker_str
                event_info["type"] = "subcontribution"
                event_info["IndicoID"] = generateIndicoID(conference=conference.getId(),
                                                          session=None,
                                                          contribution=contribution.getId(),
                                                          subcontribution=subcontribution.getId())
                event_info["title"] = subcontribution.getTitle()
                event_info["titleshort"] = truncate(event_info["title"], title_length)
                # Subcontribution objects don't have start dates,
                # so get the owner contribution's start date
                # and add the counter ctr_sc so they appear in order
                if subcontribution.getOwner().getAdjustedStartDate() is not None:
                    event_info["date"] = int(time.mktime(subcontribution.getOwner().getAdjustedStartDate().timetuple()) + ctr_sc)
                else:
                    event_info["date"] = int(time.mktime(conference.getAdjustedStartDate().timetuple())) + ctr_sc
                event_info["LOID"] = ""
                event_info["IndicoLink"] = doesExistIndicoLink(subcontribution)
                recordable_events.append(event_info)
    # one entry per session
    for session in conference.getSessionList():
        event_info = {}
        event_info["contId"] = ""
        event_info["speakers"] = ""
        event_info["type"] = "session"
        event_info["IndicoID"] = generateIndicoID(conference=conference.getId(),
                                                  session=session.getId(),
                                                  contribution=None,
                                                  subcontribution=None)
        event_info["title"] = session.getTitle()
        event_info["titleshort"] = truncate(event_info["title"], title_length)
        # Get start time as seconds since the epoch so we can sort
        if session.getAdjustedStartDate() is not None:
            event_info["date"] = int(time.mktime(session.getAdjustedStartDate().timetuple()))
        else:
            event_info["date"] = None
        event_info["LOID"] = ""
        event_info["IndicoLink"] = doesExistIndicoLink(session)
        recordable_events.append(event_info)
    # Get list of matching IndicoIDs and CDS records from CDS
    cds_indico_matches = getCDSRecords(conference.getId())
    # Logger.get('RecMan').debug('cds_indico_pending...')
    cds_indico_pending = MicalaCommunication.getCDSPending(conference.getId())
    # In case there are any records that were pending and are now appearing in CDS,
    # then update the micala database accordingly.
    MicalaCommunication.updateMicalaCDSExport(cds_indico_matches, cds_indico_pending)
    # Logger.get('RecMan').debug("cds_indico_matches: %s, cds_indico_pending: %s" % (cds_indico_matches, cds_indico_pending))
    # annotate each event with its CDS export state: a concrete record,
    # 'pending', or "none"
    for event_info in recordable_events:
        try:
            event_info["CDSID"] = cds_indico_matches[event_info["IndicoID"]]
            event_info["CDSURL"] = CollaborationTools.getOptionValue("RecordingManager", "CDSBaseURL") % event_info["CDSID"]
        except KeyError:
            # Logger.get('RecMan').debug("Following talk not in CDS: %s" % event_info["title"])
            if cds_indico_pending is not None and event_info["IndicoID"] in set(cds_indico_pending):
                event_info["CDSID"] = 'pending'
                event_info["CDSURL"] = ""
            else:
                event_info["CDSID"] = "none"
                event_info["CDSURL"] = ""
    # Get list of matching IndicoID's and LOIDs from the Micala database
    existing_matches = MicalaCommunication.getMatches(conference.getId())
    # insert any existing matches into the recordable_events array
    for talk in recordable_events:
        # Look up IndicoID in existing_matches dictionary
        try:
            matching_LOID = existing_matches[talk["IndicoID"]]
        # If not found, do nothing (talk["LOID"] should already be assigned to "" by default)
        except KeyError:
            pass
        # If there is a matching_LOID, assign it to talk["LOID"]
        else:
            talk["LOID"] = matching_LOID
    # Now that we have all the micala, CDS and IndicoLink info, set up the bg images
    for talk in recordable_events:
        talk["bg"] = chooseBGColor(talk)
    # Format dates for each talk for pleasing display
    for talk in recordable_events:
        talk["date_nice"] = formatDate(talk["date"])
    # Next, sort the list of events by startDate for display purposes
    recordable_events.sort(startTimeCompare)
    # Logger.get('RecMan').debug('leaving getTalks()')
    return recordable_events