def put_delete_undo(self, item_id):
    """Restore (un-delete) the thread item *item_id* and redirect to it.

    :param item_id: id of the content to restore (string or int)
    """
    require_current_user_is_owner(int(item_id))
    item_id = int(item_id)
    # show_archived / show_deleted are both enabled: the item is deleted,
    # so we must be able to fetch it in order to restore it.
    content_api = ContentApi(tmpl_context.current_user, True, True)
    item = content_api.get_one(item_id, self._item_type,
                               tmpl_context.workspace)
    # Success and failure both redirect to the same thread page, so build
    # the URL once instead of duplicating it in both branches.
    thread_url = tg.url('/workspaces/{}/folders/{}/threads/{}').format(
        tmpl_context.workspace_id,
        tmpl_context.folder_id,
        tmpl_context.thread_id,
    )
    try:
        msg = _('{} undeleted.').format(self._item_type_label)
        content_api.undelete(item)
        content_api.save(item, ActionDescription.UNDELETION)
        tg.flash(msg, CST.STATUS_OK)
        tg.redirect(thread_url)
    except ValueError as e:
        # BUGFIX: log str(e) — previously the bound method object
        # e.__str__ was formatted instead of the exception message.
        logger.debug(self, 'Exception: {}'.format(str(e)))
        msg = _('{} not un-deleted: {}').format(self._item_type_label,
                                                str(e))
        tg.flash(msg, CST.STATUS_ERROR)
        tg.redirect(thread_url)
def put_delete_undo(self, item_id):
    """Restore (un-delete) the item *item_id* and redirect to it.

    :param item_id: id of the content to restore (string or int)
    """
    # TODO - CHECK RIGHTS
    item_id = int(item_id)
    # Here we do not filter deleted items: the item is deleted and we need
    # to fetch it in order to restore it.
    content_api = ContentApi(tmpl_context.current_user, True, True)
    item = content_api.get_one(item_id, self._item_type,
                               tmpl_context.workspace)
    try:
        next_url = self._std_url.format(item.workspace_id, item.content_id)
        msg = _('{} undeleted.').format(self._item_type_label)
        with new_revision(item):
            content_api.undelete(item)
            content_api.save(item, ActionDescription.UNDELETION)
        tg.flash(msg, CST.STATUS_OK)
        tg.redirect(next_url)
    except ValueError as e:
        # BUGFIX: log str(e) — previously the bound method object
        # e.__str__ was formatted instead of the exception message.
        logger.debug(self, 'Exception: {}'.format(str(e)))
        back_url = self._parent_url.format(item.workspace_id,
                                           item.parent_id)
        msg = _('{} not un-deleted: {}').format(self._item_type_label,
                                                str(e))
        tg.flash(msg, CST.STATUS_ERROR)
        tg.redirect(back_url)
def put_delete_undo(self, item_id):
    """Restore (un-delete) the thread item *item_id* and redirect to it.

    :param item_id: id of the content to restore (string or int)
    """
    require_current_user_is_owner(int(item_id))
    item_id = int(item_id)
    # Here we do not filter deleted items: the item is deleted and we need
    # to fetch it in order to restore it.
    content_api = ContentApi(tmpl_context.current_user, True, True)
    item = content_api.get_one(item_id, self._item_type,
                               tmpl_context.workspace)
    # Same URL pattern is used for the success and the failure redirect.
    next_or_back = '/workspaces/{}/folders/{}/threads/{}'
    try:
        next_url = tg.url(next_or_back).format(tmpl_context.workspace_id,
                                               tmpl_context.folder_id,
                                               tmpl_context.thread_id)
        msg = _('{} undeleted.').format(self._item_type_label)
        with new_revision(item):
            content_api.undelete(item)
            content_api.save(item, ActionDescription.UNDELETION)
        tg.flash(msg, CST.STATUS_OK)
        tg.redirect(next_url)
    except ValueError as e:
        # BUGFIX: log str(e) — previously the bound method object
        # e.__str__ was formatted instead of the exception message.
        logger.debug(self, 'Exception: {}'.format(str(e)))
        back_url = tg.url(next_or_back).format(tmpl_context.workspace_id,
                                               tmpl_context.folder_id,
                                               tmpl_context.thread_id)
        msg = _('{} not un-deleted: {}').format(self._item_type_label,
                                                str(e))
        tg.flash(msg, CST.STATUS_ERROR)
        tg.redirect(back_url)
def _notify_tracim(
        self,
        mails: typing.List[DecodedMail],
) -> typing.List[DecodedMail]:
    """
    Send http request to tracim endpoint for each fetched mail.

    :param mails: list of mails to send (consumed: popped one by one)
    :return: list of mails that could not be transmitted (unsended mails)
    """
    logger.debug(
        self,
        'Notify tracim about {} new responses'.format(len(mails), ))
    unsended_mails = []
    # TODO BS 20171124: Look around mail.get_from_address(), mail.get_key()
    # , mail.get_body() etc ... for raise InvalidEmailError if missing
    # required informations (actually get_from_address raise IndexError
    # if no from address for example) and catch it here
    while mails:
        mail = mails.pop()
        msg = {
            'token': self.token,
            'user_mail': mail.get_from_address(),
            'content_id': mail.get_key(),
            'payload': {
                'content': mail.get_body(
                    use_html_parsing=self.use_html_parsing,
                    use_txt_parsing=self.use_txt_parsing),
            }
        }
        try:
            logger.debug(
                self,
                'Contact API on {} with body {}'.format(
                    self.endpoint,
                    json.dumps(msg),
                ),
            )
            r = requests.post(self.endpoint, json=msg)
            if r.status_code not in [200, 204]:
                details = r.json().get('msg')
                log = 'bad status code {} response when sending mail to tracim: {}'  # nopep8
                logger.error(self, log.format(
                    str(r.status_code),
                    details,
                ))
            # Flag all correctly checked mail, unseen the others
            # (400 means tracim rejected it permanently: no point retrying)
            if r.status_code in [200, 204, 400]:
                self._set_flag(mail.uid, IMAP_CHECKED_FLAG)
            else:
                self._unset_flag(mail.uid, IMAP_SEEN_FLAG)
        # TODO - G.M - Verify exception correctly works
        except requests.exceptions.Timeout as e:
            log = 'Timeout error to transmit fetched mail to tracim : {}'
            logger.error(self, log.format(str(e)))
            unsended_mails.append(mail)
            self._unset_flag(mail.uid, IMAP_SEEN_FLAG)
        except requests.exceptions.RequestException as e:
            log = 'Fail to transmit fetched mail to tracim : {}'
            logger.error(self, log.format(str(e)))
            # BUGFIX: record the failed mail here too — previously only
            # the Timeout branch did, so these mails were lost.
            unsended_mails.append(mail)
            self._unset_flag(mail.uid, IMAP_SEEN_FLAG)
    # BUGFIX: the collected unsended mails were documented as the return
    # value but were never actually returned.
    return unsended_mails
def _connect(self) -> None:
    """Open a fresh IMAP connection, closing any previous one first.

    Login failures are logged (with traceback) but not re-raised.
    """
    # TODO - G.M - 2017-11-15 Verify connection/disconnection
    # Are old connexion properly close this way ?
    if self._connection:
        logger.debug(self, 'Disconnect from IMAP')
        self._disconnect()
    # TODO - G.M - 2017-11-23 Support for predefined SSLContext ?
    # without ssl_context param, tracim use default security configuration
    # which is great in most case.
    if self.use_ssl:
        logger.debug(
            self,
            'Connect IMAP {}:{} using SSL'.format(self.host, self.port),
        )
        imap_factory = imaplib.IMAP4_SSL
    else:
        logger.debug(
            self,
            'Connect IMAP {}:{}'.format(self.host, self.port),
        )
        imap_factory = imaplib.IMAP4
    self._connection = imap_factory(self.host, self.port)
    try:
        logger.debug(self, 'Login IMAP with login {}'.format(self.user))
        self._connection.login(self.user, self.password)
    except Exception as e:
        logger.error(
            self,
            'Error during execution: {}'.format(str(e)),
            exc_info=1,
        )
def connect(self):
    """Open the SMTP connection and authenticate, if not already connected.

    STARTTLS and login failures are logged and deliberately swallowed so
    that sending is still attempted (best effort).
    """
    if not self._smtp_connection:
        log = 'Connecting from SMTP server {}'
        logger.info(self, log.format(self._smtp_config.server))
        self._smtp_connection = smtplib.SMTP(self._smtp_config.server,
                                             self._smtp_config.port)
        self._smtp_connection.ehlo()
        # Only negotiate TLS and authenticate when credentials are
        # configured. (The same condition was previously tested twice.)
        if self._smtp_config.login:
            try:
                starttls_result = self._smtp_connection.starttls()
                log = 'SMTP start TLS result: {}'
                logger.debug(self, log.format(starttls_result))
            except Exception as e:
                log = 'SMTP start TLS error: {}'
                logger.debug(self, log.format(str(e)))
            try:
                login_res = self._smtp_connection.login(
                    self._smtp_config.login,
                    self._smtp_config.password)
                log = 'SMTP login result: {}'
                logger.debug(self, log.format(login_res))
            except Exception as e:
                log = 'SMTP login error: {}'
                logger.debug(self, log.format(str(e)))
        logger.info(self, 'Connection OK')
def connect(self):
    """Open the SMTP connection and authenticate, if not already connected.

    STARTTLS and login failures are logged and deliberately swallowed so
    that sending is still attempted (best effort).
    """
    if not self._smtp_connection:
        log = 'Connecting from SMTP server {}'
        logger.info(self, log.format(self._smtp_config.server))
        self._smtp_connection = smtplib.SMTP(
            self._smtp_config.server,
            self._smtp_config.port
        )
        self._smtp_connection.ehlo()
        # Only negotiate TLS and authenticate when credentials are
        # configured. (The same condition was previously tested twice.)
        if self._smtp_config.login:
            try:
                starttls_result = self._smtp_connection.starttls()
                log = 'SMTP start TLS result: {}'
                logger.debug(self, log.format(starttls_result))
            except Exception as e:
                log = 'SMTP start TLS error: {}'
                logger.debug(self, log.format(str(e)))
            try:
                login_res = self._smtp_connection.login(
                    self._smtp_config.login,
                    self._smtp_config.password
                )
                log = 'SMTP login result: {}'
                logger.debug(self, log.format(login_res))
            except Exception as e:
                log = 'SMTP login error: {}'
                logger.debug(self, log.format(str(e)))
        logger.info(self, 'Connection OK')
def put_unread(self, item_id):
    """Mark the item *item_id* as unread for the current user and redirect.

    :param item_id: id of the content to mark unread (string or int)
    """
    item_id = int(item_id)
    # Here we do not filter deleted items
    content_api = ContentApi(tmpl_context.current_user, True, True)
    item = content_api.get_one(item_id, self._item_type,
                               tmpl_context.workspace)
    item_url = self._std_url.format(item.workspace_id, item.parent_id,
                                    item.content_id)
    try:
        msg = _('{} marked unread.').format(self._item_type_label)
        content_api.mark_unread(item)
        tg.flash(msg, CST.STATUS_OK)
        tg.redirect(item_url)
    except ValueError as e:
        # BUGFIX: log str(e) — previously the bound method object
        # e.__str__ was formatted instead of the exception message.
        logger.debug(self, 'Exception: {}'.format(str(e)))
        msg = _('{} not marked unread: {}').format(self._item_type_label,
                                                   str(e))
        tg.flash(msg, CST.STATUS_ERROR)
        tg.redirect(item_url)
def _unset_flag(
        self,
        uid: int,
        flag: str,
) -> None:
    """Remove *flag* from the IMAP message identified by *uid*.

    The outcome of the STORE command is only logged, never raised.
    """
    assert uid is not None
    rv, data = self._connection.store(uid, '-FLAGS', flag)
    if rv != 'OK':
        logger.error(
            self,
            'Can not unset Message {uid} as {flag} : {rv}'.format(
                uid=uid, flag=flag, rv=rv),
        )
    else:
        logger.debug(
            self,
            'Message {uid} unset as {flag}.'.format(uid=uid, flag=flag),
        )
def run(self) -> None:
    """Main loop of the mail fetcher thread.

    Every ``self.delay`` seconds: connect to the IMAP server, fetch and
    decode new messages under a file lock, push them to tracim, then
    disconnect. Errors are logged and the loop keeps running until
    ``self._is_active`` becomes False.
    """
    logger.info(self, 'Starting MailFetcher')
    while self._is_active:
        logger.debug(self, 'sleep for {}'.format(self.delay))
        time.sleep(self.delay)
        try:
            self._connect()
            # The file lock prevents several fetcher processes from
            # handling the same mailbox concurrently.
            with self.lock.acquire(timeout=MAIL_FETCHER_FILELOCK_TIMEOUT):
                messages = self._fetch()
                cleaned_mails = [
                    DecodedMail(m.message, m.uid) for m in messages
                ]
                self._notify_tracim(cleaned_mails)
            self._disconnect()
        except filelock.Timeout as e:
            log = 'Mail Fetcher Lock Timeout {}'
            logger.warning(self, log.format(e.__str__()))
        except Exception as e:
            # TODO - G.M - 2017-11-23 - Identify possible exceptions
            log = 'IMAP error: {}'
            logger.warning(self, log.format(e.__str__()))
def put_delete_undo(self, item_id):
    """Restore (un-delete) the item *item_id* and redirect to it.

    :param item_id: id of the content to restore (string or int)
    """
    # TODO - CHECK RIGHTS
    item_id = int(item_id)
    # Here we do not filter deleted items: the item is deleted and we need
    # to fetch it in order to restore it.
    content_api = ContentApi(tmpl_context.current_user, True, True)
    item = content_api.get_one(item_id, self._item_type,
                               tmpl_context.workspace)
    try:
        next_url = self._std_url.format(item.workspace_id, item.parent_id,
                                        item.content_id)
        msg = _('{} undeleted.').format(self._item_type_label)
        content_api.undelete(item)
        content_api.save(item, ActionDescription.UNDELETION)
        tg.flash(msg, CST.STATUS_OK)
        tg.redirect(next_url)
    except ValueError as e:
        # BUGFIX: log str(e) — previously the bound method object
        # e.__str__ was formatted instead of the exception message.
        logger.debug(self, 'Exception: {}'.format(str(e)))
        back_url = self._parent_url.format(item.workspace_id,
                                           item.parent_id)
        msg = _('{} not un-deleted: {}').format(self._item_type_label,
                                                str(e))
        tg.flash(msg, CST.STATUS_ERROR)
        tg.redirect(back_url)
def setup_app(section_name=None):
    """Setup the application.

    :param section_name: optional config section name forwarded to the
        SetupAppCommand run (defaults to None).
    """
    # NOTE(review): a SQLAlchemy engine, Inspector and MetaData were
    # previously built here but never used; that dead code was removed.
    logger.debug(setup_app, 'Before setup...')
    cmd = SetupAppCommand(Bunch(options=Bunch(verbose_level=1)), Bunch())
    logger.debug(setup_app, 'After setup, before run...')
    cmd.run(Bunch(config_file='config:test.ini', section_name=section_name))
    logger.debug(setup_app, 'After run...')
def _fetch(
    self,
    imapc: imapclient.IMAPClient,
) -> typing.List[MessageContainer]:
    """
    Get news message from mailbox
    :return: list of new mails
    """
    fetched = []
    logger.debug(self, 'Fetch unflagged messages')
    unflagged_uids = imapc.search(['UNFLAGGED'])
    logger.debug(self, 'Found {} unflagged mails'.format(len(unflagged_uids), ))
    responses = imapc.fetch(unflagged_uids, ['BODY.PEEK[]'])
    for uid, response in responses.items():
        # INFO - G.M - 2017-12-08 - Fetch BODY.PEEK[]
        # Retrieve all mail(body and header) but don't set mail
        # as seen because of PEEK
        # see rfc3501
        logger.debug(self, 'Fetch mail "{}"'.format(uid, ))
        try:
            parsed_mail = message_from_bytes(response[b'BODY[]'])
        except KeyError as e:
            # INFO - G.M - 12-01-2018 - Fetch may return events response
            # In some specific case, fetch command may return events
            # response unrelated to fetch request.
            # This should happen only when someone-else use the mailbox
            # at the same time of the fetcher.
            # see https://github.com/mjs/imapclient/issues/334
            except_msg = 'fetch response : {}'.format(str(response))
            raise BadIMAPFetchResponse(except_msg) from e
        fetched.append(MessageContainer(parsed_mail, uid))
    return fetched
def notify_content_update(self, event_actor_id: int, event_content_id: int):
    """
    Look for all users to be notified about the new content and send them an
    individual email.

    :param event_actor_id: id of the user that has triggered the event
    :param event_content_id: related content_id
    :return: None
    """
    # FIXME - D.A. - 2014-11-05
    # Dirty import. It's here in order to avoid circular import
    from tracim.lib.content import ContentApi
    user = UserApi(None).get_one(event_actor_id)
    logger.debug(self, 'Content: {}'.format(event_content_id))
    # Archived/deleted contents are included: the triggering event may
    # concern a content that has just been archived or deleted.
    content = ContentApi(
        user,
        show_archived=True,
        show_deleted=True
    ).get_one(
        event_content_id,
        ContentType.Any
    )
    # TODO - use a system user instead of the user that has triggered the event
    # Comments are notified through their parent content.
    main_content = content.parent if content.type == ContentType.Comment else content
    notifiable_roles = WorkspaceApi(user).get_notifiable_roles(
        content.workspace)
    if len(notifiable_roles) <= 0:
        logger.info(
            self,
            'Skipping notification as nobody subscribed to in workspace {}'
            .format(content.workspace.label))
        return
    logger.info(
        self,
        'Sending asynchronous emails to {} user(s)'.format(
            len(notifiable_roles)))
    # INFO - D.A. - 2014-11-06
    # The following email sender will send emails in the async task queue
    # This allow to build all mails through current thread but really send them (including SMTP connection)
    # In the other thread.
    #
    # This way, the webserver will return sooner (actually before notification emails are sent
    async_email_sender = EmailSender(
        self._smtp_config,
        self._global_config.EMAIL_NOTIFICATION_ACTIVATED)
    for role in notifiable_roles:
        logger.info(self, 'Sending email to {}'.format(role.user.email))
        to_addr = '{name} <{email}>'.format(name=role.user.display_name,
                                            email=role.user.email)
        #
        # INFO - D.A. - 2014-11-06
        # We do not use .format() here because the subject defined in the
        # .ini file may not include all required labels. In order to avoid
        # partial format() (which result in an exception) we do use replace
        # and force the use of .__str__() in order to process LazyString
        # objects
        #
        subject = self._global_config.EMAIL_NOTIFICATION_CONTENT_UPDATE_SUBJECT
        subject = subject.replace(
            EST.WEBSITE_TITLE,
            self._global_config.WEBSITE_TITLE.__str__())
        subject = subject.replace(EST.WORKSPACE_LABEL,
                                  main_content.workspace.label.__str__())
        subject = subject.replace(EST.CONTENT_LABEL,
                                  main_content.label.__str__())
        subject = subject.replace(
            EST.CONTENT_STATUS_LABEL,
            main_content.get_status().label.__str__())
        message = MIMEMultipart('alternative')
        message['Subject'] = subject
        message['From'] = self._get_sender(user)
        message['To'] = to_addr
        body_text = self._build_email_body(
            self._global_config.
            EMAIL_NOTIFICATION_CONTENT_UPDATE_TEMPLATE_TEXT,
            role, content, user)
        body_html = self._build_email_body(
            self._global_config.
            EMAIL_NOTIFICATION_CONTENT_UPDATE_TEMPLATE_HTML,
            role, content, user)
        part1 = MIMEText(body_text, 'plain', 'utf-8')
        part2 = MIMEText(body_html, 'html', 'utf-8')
        # Attach parts into message container.
        # According to RFC 2046, the last part of a multipart message, in this case
        # the HTML message, is best and preferred.
        message.attach(part1)
        message.attach(part2)
        send_email_through(async_email_sender.send_mail, message)
def setUp(self):
    """Prepare a fresh application and database before each test."""
    self.app = load_app(self.application_under_test)
    logger.debug(self, 'Start setUp() by trying to clean database...')
    # Best effort: the database may not exist yet on the first run.
    try:
        teardown_db()
    except Exception as e:
        logger.debug(
            self,
            'teardown() throwed an exception {}'.format(str(e)),
        )
    logger.debug(self, 'Start setUp() by trying to clean database... -> done')
    logger.debug(self, 'Start Application Setup...')
    setup_app()
    logger.debug(self, 'Start Application Setup... -> done')
    logger.debug(self, 'Start Database Setup...')
    setup_db()
    logger.debug(self, 'Start Database Setup... -> done')
    logger.debug(self, 'Load extra fixtures...')
    # BaseFixture is already loaded in bootstrap
    loader = FixturesLoader([BaseFixture])
    loader.loads(self.fixtures)
    logger.debug(self, 'Load extra fixtures... -> done')
    # Allow to create fake context
    self.app.get('/_test_vars')
    # Set a default lang
    tg.i18n.set_lang(['en', ])
def teardown_db():
    """Destroy the database schema.

    Drops, in order: foreign key constraints (skipped on SQLite), views,
    tables, and finally the hard-coded sequences; then commits and
    disposes the engine.
    """
    engine = config['tg.app_globals'].sa_engine
    connection = engine.connect()
    # INFO - D.A. - 2014-12-04
    # Recipe taken from bitbucket:
    # https://bitbucket.org/zzzeek/sqlalchemy/wiki/UsageRecipes/DropEverything
    inspector = reflection.Inspector.from_engine(engine)
    metadata = MetaData()
    tbs = []
    all_fks = []
    views = []
    # INFO - D.A. - 2014-12-04
    # Sequences are hard defined here because SQLA does not allow to reflect them from existing schema
    seqs = [
        Sequence('seq__groups__group_id'),
        Sequence('seq__contents__content_id'),
        Sequence('seq__content_revisions__revision_id'),
        Sequence('seq__permissions__permission_id'),
        Sequence('seq__users__user_id'),
        Sequence('seq__workspaces__workspace_id')
    ]
    for view_name in inspector.get_view_names():
        v = Table(view_name, metadata)
        views.append(v)
    # Build lightweight Table objects carrying only the FK constraints, so
    # constraints can be dropped before the tables that depend on them.
    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
            fks.append(
                ForeignKeyConstraint((), (), name=fk['name'])
            )
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)
    # SQLite does not support dropping constraints separately.
    if not config['sqlalchemy.url'].startswith('sqlite'):
        for fkc in all_fks:
            connection.execute(DropConstraint(fkc))
    for view in views:
        drop_statement = 'DROP VIEW {}'.format(view.name)
        # engine.execute(drop_statement)
        connection.execute(drop_statement)
    for table in tbs:
        connection.execute(DropTable(table))
    for sequence in seqs:
        try:
            connection.execute(DropSequence(sequence))
        except Exception as e:
            # Best effort: the sequence may not exist on this backend.
            logger.debug(teardown_db, 'Exception while trying to remove sequence {}'.format(sequence.name))
    transaction.commit()
    connection.close()
    engine.dispose()
def wrapper_func(*args, **kwargs):
    """Call *func* and log its wall-clock execution time."""
    started_at = time.time()
    result = func(*args, **kwargs)
    ended_at = time.time()
    logger.debug(func, 'exec time: {} seconds'.format(ended_at - started_at))
    return result
def _build_email_body(self, mako_template_filepath: str, role: UserRoleInWorkspace, content: Content, actor: User) -> str:
    """
    Build an email body and return it as a string
    :param mako_template_filepath: the absolute path to the mako template to be used for email body building
    :param role: the role related to user to whom the email must be sent. The role is required (and not the user only) in order to show in the mail why the user receive the notification
    :param content: the content item related to the notification
    :param actor: the user at the origin of the action / notification (for example the one who wrote a comment
    :param config: the global configuration
    :return: the built email body as string. In case of multipart email, this method must be called one time for text and one time for html
    """
    logger.debug(self, 'Building email content from MAKO template {}'.format(mako_template_filepath))
    template = Template(filename=mako_template_filepath)
    # TODO - D.A. - 2014-11-06 - move this
    # Import is here for circular import problem
    import tracim.lib.helpers as helpers
    dictified_item = Context(CTX.EMAIL_NOTIFICATION, self._global_config.WEBSITE_BASE_URL).toDict(content)
    dictified_actor = Context(CTX.DEFAULT).toDict(actor)
    main_title = dictified_item.label
    content_intro = ''
    content_text = ''
    call_to_action_text = ''
    # The intro / body / call-to-action texts depend on both the last
    # action performed on the content and the content type.
    action = content.get_last_action().id
    if ActionDescription.COMMENT == action:
        content_intro = _('<span id="content-intro-username">{}</span> added a comment:').format(actor.display_name)
        content_text = content.description
        call_to_action_text = _('Answer')
    elif ActionDescription.CREATION == action:
        # Default values (if not overriden)
        content_text = content.description
        call_to_action_text = _('View online')
        if ContentType.Thread == content.type:
            call_to_action_text = _('Answer')
            content_intro = _('<span id="content-intro-username">{}</span> started a thread entitled:').format(actor.display_name)
            content_text = '<p id="content-body-intro">{}</p>'.format(content.label) + \
                           content.get_last_comment_from(actor).description
        elif ContentType.File == content.type:
            content_intro = _('<span id="content-intro-username">{}</span> added a file entitled:').format(actor.display_name)
            if content.description:
                content_text = content.description
            else:
                content_text = '<span id="content-body-only-title">{}</span>'.format(content.label)
        elif ContentType.Page == content.type:
            content_intro = _('<span id="content-intro-username">{}</span> added a page entitled:').format(actor.display_name)
            content_text = '<span id="content-body-only-title">{}</span>'.format(content.label)
    elif ActionDescription.REVISION == action:
        content_text = content.description
        call_to_action_text = _('View online')
        if ContentType.File == content.type:
            content_intro = _('<span id="content-intro-username">{}</span> uploaded a new revision.').format(actor.display_name)
            content_text = ''
        elif ContentType.Page == content.type:
            content_intro = _('<span id="content-intro-username">{}</span> updated this page.').format(actor.display_name)
            previous_revision = content.get_previous_revision()
            title_diff = ''
            if previous_revision.label != content.label:
                title_diff = htmldiff(previous_revision.label, content.label)
            content_text = _('<p id="content-body-intro">Here is an overview of the changes:</p>') + \
                           title_diff + \
                           htmldiff(previous_revision.description, content.description)
        elif ContentType.Thread == content.type:
            content_intro = _('<span id="content-intro-username">{}</span> updated the thread description.').format(actor.display_name)
            previous_revision = content.get_previous_revision()
            title_diff = ''
            if previous_revision.label != content.label:
                title_diff = htmldiff(previous_revision.label, content.label)
            content_text = _('<p id="content-body-intro">Here is an overview of the changes:</p>') + \
                           title_diff + \
                           htmldiff(previous_revision.description, content.description)
        # elif ContentType.Thread == content.type:
        #     content_intro = _('<span id="content-intro-username">{}</span> updated this page.').format(actor.display_name)
        #     previous_revision = content.get_previous_revision()
        #     content_text = _('<p id="content-body-intro">Here is an overview of the changes:</p>')+ \
        #         htmldiff(previous_revision.description, content.description)
    elif ActionDescription.EDITION == action:
        call_to_action_text = _('View online')
        if ContentType.File == content.type:
            content_intro = _('<span id="content-intro-username">{}</span> updated the file description.').format(actor.display_name)
            content_text = '<p id="content-body-intro">{}</p>'.format(content.get_label()) + \
                           content.description
    if '' == content_intro and content_text == '':
        # Skip notification, but it's not normal
        logger.error(
            self, 'A notification is being sent but no content. '
                  'Here are some debug informations: [content_id: {cid}]'
                  '[action: {act}][author: {actor}]'.format(
                cid=content.content_id, act=action, actor=actor
            )
        )
        raise ValueError('Unexpected empty notification')
    # Import done here because cyclic import
    from tracim.config.app_cfg import CFG
    body_content = template.render(
        base_url=self._global_config.WEBSITE_BASE_URL,
        _=_,
        h=helpers,
        user_display_name=role.user.display_name,
        user_role_label=role.role_as_label(),
        workspace_label=role.workspace.label,
        content_intro=content_intro,
        content_text=content_text,
        main_title=main_title,
        call_to_action_text=call_to_action_text,
        result=DictLikeClass(item=dictified_item, actor=dictified_actor),
        CFG=CFG.get_instance(),
    )
    return body_content
def notify_content_update(self, event_actor_id: int, event_content_id: int):
    """
    Look for all users to be notified about the new content and send them an
    individual email.

    :param event_actor_id: id of the user that has triggered the event
    :param event_content_id: related content_id
    :return: None
    """
    # FIXME - D.A. - 2014-11-05
    # Dirty import. It's here in order to avoid circular import
    from tracim.lib.content import ContentApi
    user = UserApi(None).get_one(event_actor_id)
    logger.debug(self, 'Content: {}'.format(event_content_id))
    # Archived/deleted contents are included: the triggering event may
    # concern a content that has just been archived or deleted.
    content = ContentApi(user, show_archived=True, show_deleted=True).get_one(event_content_id, ContentType.Any)
    # TODO - use a system user instead of the user that has triggered the event
    # Comments are notified through their parent content.
    main_content = content.parent if content.type == ContentType.Comment else content
    notifiable_roles = WorkspaceApi(user).get_notifiable_roles(content.workspace)
    if len(notifiable_roles) <= 0:
        logger.info(self, 'Skipping notification as nobody subscribed to in workspace {}'.format(content.workspace.label))
        return
    logger.info(self, 'Sending asynchronous emails to {} user(s)'.format(len(notifiable_roles)))
    # INFO - D.A. - 2014-11-06
    # The following email sender will send emails in the async task queue
    # This allow to build all mails through current thread but really send them (including SMTP connection)
    # In the other thread.
    #
    # This way, the webserver will return sooner (actually before notification emails are sent
    async_email_sender = EmailSender(self._smtp_config, self._global_config.EMAIL_NOTIFICATION_ACTIVATED)
    for role in notifiable_roles:
        logger.info(self, 'Sending email to {}'.format(role.user.email))
        to_addr = '{name} <{email}>'.format(name=role.user.display_name, email=role.user.email)
        #
        # INFO - D.A. - 2014-11-06
        # We do not use .format() here because the subject defined in the
        # .ini file may not include all required labels. In order to avoid
        # partial format() (which result in an exception) we do use replace
        # and force the use of .__str__() in order to process LazyString
        # objects
        #
        subject = self._global_config.EMAIL_NOTIFICATION_CONTENT_UPDATE_SUBJECT
        subject = subject.replace(EST.WEBSITE_TITLE, self._global_config.WEBSITE_TITLE.__str__())
        subject = subject.replace(EST.WORKSPACE_LABEL, main_content.workspace.label.__str__())
        subject = subject.replace(EST.CONTENT_LABEL, main_content.label.__str__())
        subject = subject.replace(EST.CONTENT_STATUS_LABEL, main_content.get_status().label.__str__())
        message = MIMEMultipart('alternative')
        message['Subject'] = subject
        message['From'] = self._global_config.EMAIL_NOTIFICATION_FROM
        message['To'] = to_addr
        body_text = self._build_email_body(self._global_config.EMAIL_NOTIFICATION_CONTENT_UPDATE_TEMPLATE_TEXT, role, content, user)
        body_html = self._build_email_body(self._global_config.EMAIL_NOTIFICATION_CONTENT_UPDATE_TEMPLATE_HTML, role, content, user)
        part1 = MIMEText(body_text, 'plain', 'utf-8')
        part2 = MIMEText(body_html, 'html', 'utf-8')
        # Attach parts into message container.
        # According to RFC 2046, the last part of a multipart message, in this case
        # the HTML message, is best and preferred.
        message.attach(part1)
        message.attach(part2)
        # NOTE(review): message_str is computed but never used — candidate
        # for removal.
        message_str = message.as_string()
        asyncjob_perform(async_email_sender.send_mail, message)
        # s.send_message(message)
    # Note: The following action allow to close the SMTP connection.
    # This will work only if the async jobs are done in the right order
    asyncjob_perform(async_email_sender.disconnect)
def run(self) -> None:
    """Main loop of the mail fetcher thread.

    Maintains an IMAP connection (renewed when its lifetime limit is
    reached), checks for new mail either via IDLE or by polling, and logs
    (without re-raising) socket / SSL / lock / IMAP errors so the loop
    keeps running until ``self._is_active`` becomes False.
    """
    logger.info(self, 'Starting MailFetcher')
    while self._is_active:
        imapc = None
        sleep_after_connection = True
        try:
            imapc = imapclient.IMAPClient(
                self.host,
                self.port,
                ssl=self.use_ssl,
                timeout=MAIL_FETCHER_CONNECTION_TIMEOUT)
            imapc.login(self.user, self.password)
            logger.debug(self, 'Select folder {}'.format(self.folder, ))
            imapc.select_folder(self.folder)
            # force renew connection when deadline is reached
            deadline = time.time() + self.connection_max_lifetime
            while True:
                if not self._is_active:
                    logger.warning(self, 'Mail Fetcher process aborted')
                    sleep_after_connection = False
                    break
                if time.time() > deadline:
                    logger.debug(
                        self,
                        "MailFetcher Connection Lifetime limit excess"
                        ", Try Re-new connection")
                    sleep_after_connection = False
                    break
                # check for new mails
                self._check_mail(imapc)
                if self.use_idle and imapc.has_capability('IDLE'):
                    # IDLE_mode wait until event from server
                    logger.debug(self, 'wail for event(IDLE)')
                    imapc.idle()
                    imapc.idle_check(
                        timeout=MAIL_FETCHER_IDLE_RESPONSE_TIMEOUT)
                    imapc.idle_done()
                else:
                    if self.use_idle and not imapc.has_capability('IDLE'):
                        log = 'IDLE mode activated but server do not' \
                              'support it, use polling instead.'
                        logger.warning(self, log)
                    # normal polling mode : sleep a define duration
                    logger.debug(self, 'sleep for {}'.format(self.heartbeat))
                    time.sleep(self.heartbeat)
        # Socket
        except (socket.error, socket.gaierror, socket.herror) as e:
            log = 'Socket fail with IMAP connection {}'
            logger.error(self, log.format(e.__str__()))
        except socket.timeout as e:
            log = 'Socket timeout on IMAP connection {}'
            logger.error(self, log.format(e.__str__()))
        # SSL
        except ssl.SSLError as e:
            log = 'SSL error on IMAP connection'
            logger.error(self, log.format(e.__str__()))
        except ssl.CertificateError as e:
            log = 'SSL Certificate verification failed on IMAP connection'
            logger.error(self, log.format(e.__str__()))
        # Filelock
        except filelock.Timeout as e:
            log = 'Mail Fetcher Lock Timeout {}'
            logger.warning(self, log.format(e.__str__()))
        # IMAP
        # TODO - G.M - 10-01-2017 - Support imapclient exceptions
        # when Imapclient stable will be 2.0+
        except BadIMAPFetchResponse as e:
            log = 'Imap Fetch command return bad response.' \
                  'Is someone else connected to the mailbox ?: ' \
                  '{}'
            logger.error(self, log.format(e.__str__()))
        # Others
        except Exception as e:
            log = 'Mail Fetcher error {}'
            logger.error(self, log.format(e.__str__()))
        finally:
            # INFO - G.M - 2018-01-09 - Connection closing
            # Properly close connection according to
            # https://github.com/mjs/imapclient/pull/279/commits/043e4bd0c5c775c5a08cb5f1baa93876a46732ee
            # TODO : Use __exit__ method instead when imapclient stable will
            # be 2.0+ .
            if imapc:
                logger.debug(self, 'Try logout')
                try:
                    imapc.logout()
                except Exception:
                    # Fall back to a raw socket shutdown when a clean
                    # logout is impossible.
                    try:
                        imapc.shutdown()
                    except Exception as e:
                        log = "Can't logout, connection broken ? {}"
                        logger.error(self, log.format(e.__str__()))
        if sleep_after_connection:
            logger.debug(self, 'sleep for {}'.format(self.heartbeat))
            time.sleep(self.heartbeat)
    log = 'Mail Fetcher stopped'
    logger.debug(self, log)
def teardown_db():
    """Destroy the database schema.

    Drops, in order: foreign key constraints (skipped on SQLite), views,
    tables, and finally the hard-coded sequences; then commits and
    disposes the engine.
    """
    engine = config['tg.app_globals'].sa_engine
    connection = engine.connect()
    # INFO - D.A. - 2014-12-04
    # Recipe taken from bitbucket:
    # https://bitbucket.org/zzzeek/sqlalchemy/wiki/UsageRecipes/DropEverything
    inspector = reflection.Inspector.from_engine(engine)
    metadata = MetaData()
    tbs = []
    all_fks = []
    views = []
    # INFO - D.A. - 2014-12-04
    # Sequences are hard defined here because SQLA does not allow to reflect them from existing schema
    seqs = [
        Sequence('seq__groups__group_id'),
        Sequence('seq__contents__content_id'),
        Sequence('seq__content_revisions__revision_id'),
        Sequence('seq__permissions__permission_id'),
        Sequence('seq__users__user_id'),
        Sequence('seq__workspaces__workspace_id')
    ]
    for view_name in inspector.get_view_names():
        v = Table(view_name, metadata)
        views.append(v)
    # Build lightweight Table objects carrying only the FK constraints, so
    # constraints can be dropped before the tables that depend on them.
    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
            fks.append(ForeignKeyConstraint((), (), name=fk['name']))
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)
    # SQLite does not support dropping constraints separately.
    if not config['sqlalchemy.url'].startswith('sqlite'):
        for fkc in all_fks:
            connection.execute(DropConstraint(fkc))
    for view in views:
        drop_statement = 'DROP VIEW {}'.format(view.name)
        # engine.execute(drop_statement)
        connection.execute(drop_statement)
    for table in tbs:
        connection.execute(DropTable(table))
    for sequence in seqs:
        try:
            connection.execute(DropSequence(sequence))
        except Exception as e:
            # Best effort: the sequence may not exist on this backend.
            logger.debug(
                teardown_db,
                'Exception while trying to remove sequence {}'.format(
                    sequence.name))
    transaction.commit()
    connection.close()
    engine.dispose()
def get_one(self, file_id, revision_id=None):
    """Render a single file content with its preview-page URLs.

    :param file_id: id of the file content to display
    :param revision_id: optional revision id; when given, that specific
        revision is displayed instead of the latest one
    :return: DictLikeClass with the dictified file, fake_api context,
        page count, preview URLs and PDF availability flag
    """
    file_id = int(file_id)
    cache_path = CFG.get_instance().PREVIEW_CACHE_DIR
    preview_manager = PreviewManager(cache_path, create_folder=True)
    user = tmpl_context.current_user
    workspace = tmpl_context.workspace
    current_user_content = Context(CTX.CURRENT_USER,
                                   current_user=user).toDict(user)
    current_user_content.roles.sort(key=lambda role: role.workspace.name)

    content_api = ContentApi(user, show_archived=True, show_deleted=True)
    if revision_id:
        file = content_api.get_one_from_revision(file_id,
                                                 self._item_type,
                                                 workspace,
                                                 revision_id)
    else:
        file = content_api.get_one(file_id, self._item_type, workspace)
        revision_id = file.revision_id

    file_path = content_api.get_one_revision_filepath(revision_id)
    nb_page = 0
    enable_pdf_buttons = False  # type: bool
    preview_urls = []
    try:
        nb_page = preview_manager.get_page_nb(file_path=file_path)
        for page in range(int(nb_page)):
            url_str = '/previews/{}/pages/{}?revision_id={}'
            url = url_str.format(file_id, page, revision_id)
            preview_urls.append(url)
        enable_pdf_buttons = \
            preview_manager.has_pdf_preview(file_path=file_path)
    except PreviewGeneratorException as e:
        # INFO - A.P - Silently intercepts preview exception
        # As preview generation isn't mandatory, just register it
        # FIX: use str(e) — 'e.__str__' logged the bound method object
        # instead of the exception message.
        logger.debug(self,
                     'Preview Generator Exception: {}'.format(str(e)))
    except Exception as e:
        # INFO - D.A - 2017-08-11 - Make Tracim robust to pg exceptions
        # Preview generator may potentially raise any type of exception
        # so we prevent user interface crashes by catching all exceptions
        # FIX: same str(e) fix as above.
        logger.error(
            self,
            'Preview Generator Generic Exception: {}'.format(str(e)))

    pdf_available = 'true' if enable_pdf_buttons else 'false'  # type: str
    fake_api_breadcrumb = self.get_breadcrumb(file_id)
    fake_api_content = DictLikeClass(breadcrumb=fake_api_breadcrumb,
                                     current_user=current_user_content)
    fake_api = Context(CTX.FOLDER, current_user=user)\
        .toDict(fake_api_content)

    dictified_file = Context(self._get_one_context,
                             current_user=user).toDict(file, 'file')
    result = DictLikeClass(result=dictified_file,
                           fake_api=fake_api,
                           nb_page=nb_page,
                           url=preview_urls,
                           pdf_available=pdf_available)
    return result
def notify_created_account(
        self,
        user: User,
        password: str,
) -> None:
    """
    Send the "account created" notification email to a new user.

    :param user: user to notify
    :param password: the password that was chosen for the account
    """
    # TODO BS 20160712: Cyclic import
    from tracim.lib.notifications import EST

    logger.debug(self, 'user: {}'.format(user.user_id))
    logger.info(self, 'Sending asynchronous email to 1 user ({0})'.format(
        user.email,
    ))

    async_email_sender = EmailSender(
        self._smtp_config,
        self._global_config.EMAIL_NOTIFICATION_ACTIVATED
    )

    # Substitute the website-title placeholder in the configured subject.
    subject = self._global_config.EMAIL_NOTIFICATION_CREATED_ACCOUNT_SUBJECT.replace(  # nopep8
        EST.WEBSITE_TITLE,
        self._global_config.WEBSITE_TITLE.__str__()
    )

    message = MIMEMultipart('alternative')
    message['Subject'] = subject
    message['From'] = '{0} <{1}>'.format(
        self._global_config.EMAIL_NOTIFICATION_FROM_DEFAULT_LABEL,
        self._global_config.EMAIL_NOTIFICATION_FROM_EMAIL,
    )
    message['To'] = user.email

    # Both the text and HTML templates receive the same values; each
    # render gets its own dict copy.
    template_context = {
        'user': user,
        'password': password,
        'login_url': self._global_config.WEBSITE_BASE_URL,
    }
    body_text = self._render(
        mako_template_filepath=self._global_config.EMAIL_NOTIFICATION_CREATED_ACCOUNT_TEMPLATE_TEXT,  # nopep8
        context=dict(template_context),
    )
    body_html = self._render(
        mako_template_filepath=self._global_config.EMAIL_NOTIFICATION_CREATED_ACCOUNT_TEMPLATE_HTML,  # nopep8
        context=dict(template_context),
    )

    # Attach parts into message container.
    # According to RFC 2046, the last part of a multipart message,
    # in this case the HTML message, is best and preferred.
    message.attach(MIMEText(body_text, 'plain', 'utf-8'))
    message.attach(MIMEText(body_html, 'html', 'utf-8'))

    send_email_through(async_email_sender.send_mail, message)
def setUp(self):
    """Reset the application and rebuild the database before each test."""
    def trace(message):
        # Shorthand for the verbose setup-lifecycle logging below.
        logger.debug(self, message)

    self.app = load_app(self.application_under_test)

    trace('Start setUp() by trying to clean database...')
    try:
        teardown_db()
    except Exception as e:
        # A failed teardown (e.g. first run, nothing to drop) is fine.
        trace('teardown() throwed an exception {}'.format(e.__str__()))
    trace('Start setUp() by trying to clean database... -> done')

    trace('Start Application Setup...')
    setup_app()
    trace('Start Application Setup... -> done')

    trace('Start Database Setup...')
    setup_db()
    trace('Start Database Setup... -> done')

    self.app.get('/_test_vars')  # Allow to create fake context
    tg.i18n.set_lang('en')  # Set a default lang
def setUp(self):
    """Reset app state, rebuild the database and load this case's fixtures."""
    def trace(message):
        # Shorthand for the verbose setup-lifecycle logging below.
        logger.debug(self, message)

    self.app = load_app(self.application_under_test)

    trace('Start setUp() by trying to clean database...')
    try:
        teardown_db()
    except Exception as e:
        # A failed teardown (e.g. first run, nothing to drop) is fine.
        trace('teardown() throwed an exception {}'.format(e.__str__()))
    trace('Start setUp() by trying to clean database... -> done')

    trace('Start Application Setup...')
    setup_app()
    trace('Start Application Setup... -> done')

    trace('Start Database Setup...')
    setup_db()
    trace('Start Database Setup... -> done')

    trace('Load extra fixtures...')
    # BaseFixture is already loaded in bootstrap
    fixtures_loader = FixturesLoader([BaseFixture])
    fixtures_loader.loads(self.fixtures)
    trace('Load extra fixtures... -> done')

    self.app.get('/_test_vars')  # Allow to create fake context
    tg.i18n.set_lang([
        'en',
    ])  # Set a default lang
def wrapper_func(*args, **kwargs):
    """Call the wrapped function, log its duration, and return its result.

    FIX: measure with time.perf_counter() instead of time.time().
    perf_counter is monotonic and has the best available resolution, so
    the reported duration cannot be skewed by system clock adjustments
    (NTP steps, DST, manual changes), which time.time() is subject to.
    """
    start = time.perf_counter()
    retval = func(*args, **kwargs)
    elapsed = time.perf_counter() - start
    logger.debug(func, 'exec time: {} seconds'.format(elapsed))
    return retval
def _build_email_body(self, mako_template_filepath: str, role: UserRoleInWorkspace, content: Content, actor: User) -> str:
    """
    Build an email body and return it as a string

    :param mako_template_filepath: the absolute path to the mako template
        to be used for email body building
    :param role: the role related to user to whom the email must be sent.
        The role is required (and not the user only) in order to show in
        the mail why the user receive the notification
    :param content: the content item related to the notification
    :param actor: the user at the origin of the action / notification
        (for example the one who wrote a comment)
    :return: the built email body as string. In case of multipart email,
        this method must be called one time for text and one time for html
    """
    logger.debug(
        self, 'Building email content from MAKO template {}'.format(
            mako_template_filepath))
    template = Template(filename=mako_template_filepath)
    # TODO - D.A. - 2014-11-06 - move this
    # Import is here for circular import problem
    import tracim.lib.helpers as helpers
    dictified_item = Context(
        CTX.EMAIL_NOTIFICATION,
        self._global_config.WEBSITE_BASE_URL).toDict(content)
    dictified_actor = Context(CTX.DEFAULT).toDict(actor)

    main_title = dictified_item.label
    # The three fragments below are filled depending on the last action
    # performed on the content; if both intro and text stay empty at the
    # end, the notification is aborted (see ValueError below).
    content_intro = ''
    content_text = ''
    call_to_action_text = ''

    action = content.get_last_action().id
    if ActionDescription.COMMENT == action:
        content_intro = l_(
            '<span id="content-intro-username">{}</span> added a comment:'
        ).format(actor.display_name)
        content_text = content.description
        call_to_action_text = l_('Answer')

    elif ActionDescription.CREATION == action:
        # Default values (if not overriden)
        content_text = content.description
        call_to_action_text = l_('View online')

        # Per-content-type wording for a newly created item.
        if ContentType.Thread == content.type:
            call_to_action_text = l_('Answer')
            content_intro = l_(
                '<span id="content-intro-username">{}</span> started a thread entitled:'
            ).format(actor.display_name)
            content_text = '<p id="content-body-intro">{}</p>'.format(content.label) + \
                content.get_last_comment_from(actor).description

        elif ContentType.File == content.type:
            content_intro = l_(
                '<span id="content-intro-username">{}</span> added a file entitled:'
            ).format(actor.display_name)
            if content.description:
                content_text = content.description
            else:
                content_text = '<span id="content-body-only-title">{}</span>'.format(
                    content.label)

        elif ContentType.Page == content.type:
            content_intro = l_(
                '<span id="content-intro-username">{}</span> added a page entitled:'
            ).format(actor.display_name)
            content_text = '<span id="content-body-only-title">{}</span>'.format(
                content.label)

    elif ActionDescription.REVISION == action:
        content_text = content.description
        call_to_action_text = l_('View online')

        if ContentType.File == content.type:
            # File revisions carry no textual body, only the intro line.
            content_intro = l_(
                '<span id="content-intro-username">{}</span> uploaded a new revision.'
            ).format(actor.display_name)
            content_text = ''

        elif ContentType.Page == content.type:
            content_intro = l_(
                '<span id="content-intro-username">{}</span> updated this page.'
            ).format(actor.display_name)
            previous_revision = content.get_previous_revision()
            # Show an HTML diff of title (when changed) and description.
            title_diff = ''
            if previous_revision.label != content.label:
                title_diff = htmldiff(previous_revision.label, content.label)
            content_text = str(l_('<p id="content-body-intro">Here is an overview of the changes:</p>'))+ \
                title_diff + \
                htmldiff(previous_revision.description, content.description)

        elif ContentType.Thread == content.type:
            content_intro = l_(
                '<span id="content-intro-username">{}</span> updated the thread description.'
            ).format(actor.display_name)
            previous_revision = content.get_previous_revision()
            # Same diff construction as for pages.
            title_diff = ''
            if previous_revision.label != content.label:
                title_diff = htmldiff(previous_revision.label, content.label)
            content_text = str(l_('<p id="content-body-intro">Here is an overview of the changes:</p>'))+ \
                title_diff + \
                htmldiff(previous_revision.description, content.description)

        # elif ContentType.Thread == content.type:
        #     content_intro = l_('<span id="content-intro-username">{}</span> updated this page.').format(actor.display_name)
        #     previous_revision = content.get_previous_revision()
        #     content_text = l_('<p id="content-body-intro">Here is an overview of the changes:</p>')+ \
        #         htmldiff(previous_revision.description, content.description)

    elif ActionDescription.EDITION == action:
        call_to_action_text = l_('View online')

        if ContentType.File == content.type:
            content_intro = l_(
                '<span id="content-intro-username">{}</span> updated the file description.'
            ).format(actor.display_name)
            content_text = '<p id="content-body-intro">{}</p>'.format(content.get_label()) + \
                content.description

    elif ActionDescription.STATUS_UPDATE == action:
        call_to_action_text = l_('View online')
        intro_user_msg = l_('<span id="content-intro-username">{}</span> '
                            'updated the following status:')
        content_intro = intro_user_msg.format(actor.display_name)
        intro_body_msg = '<p id="content-body-intro">{}: {}</p>'
        content_text = intro_body_msg.format(
            content.get_label(),
            content.get_status().label,
        )

    if '' == content_intro and content_text == '':
        # Skip notification, but it's not normal
        logger.error(
            self, 'A notification is being sent but no content. '
                  'Here are some debug informations: [content_id: {cid}]'
                  '[action: {act}][author: {actor}]'.format(
                      cid=content.content_id, act=action, actor=actor))
        raise ValueError('Unexpected empty notification')

    # Import done here because cyclic import
    from tracim.config.app_cfg import CFG
    body_content = template.render(
        base_url=self._global_config.WEBSITE_BASE_URL,
        _=l_,
        h=helpers,
        user_display_name=role.user.display_name,
        user_role_label=role.role_as_label(),
        workspace_label=role.workspace.label,
        content_intro=content_intro,
        content_text=content_text,
        main_title=main_title,
        call_to_action_text=call_to_action_text,
        result=DictLikeClass(item=dictified_item, actor=dictified_actor),
        CFG=CFG.get_instance(),
    )
    return body_content
def notify_created_account(
        self,
        user: User,
        password: str,
) -> None:
    """
    Send the "account created" notification email to a new user.

    :param user: user to notify
    :param password: the password that was chosen for the account
    """
    # TODO BS 20160712: Cyclic import
    from tracim.lib.notifications import EST

    logger.debug(self, 'user: {}'.format(user.user_id))
    logger.info(self, 'Sending asynchronous email to 1 user ({0})'.format(
        user.email,
    ))

    async_email_sender = EmailSender(
        self._smtp_config,
        self._global_config.EMAIL_NOTIFICATION_ACTIVATED
    )

    # Substitute the website-title placeholder in the configured subject.
    subject = self._global_config.EMAIL_NOTIFICATION_CREATED_ACCOUNT_SUBJECT.replace(  # nopep8
        EST.WEBSITE_TITLE,
        self._global_config.WEBSITE_TITLE.__str__()
    )

    message = MIMEMultipart('alternative')
    message['Subject'] = subject
    message['From'] = self._global_config.EMAIL_NOTIFICATION_FROM
    message['To'] = user.email

    # Both the text and HTML templates receive the same values; each
    # render gets its own dict copy.
    template_context = {
        'user': user,
        'password': password,
        'login_url': self._global_config.WEBSITE_BASE_URL,
    }
    body_text = self._render(
        mako_template_filepath=self._global_config.EMAIL_NOTIFICATION_CREATED_ACCOUNT_TEMPLATE_TEXT,  # nopep8
        context=dict(template_context),
    )
    body_html = self._render(
        mako_template_filepath=self._global_config.EMAIL_NOTIFICATION_CREATED_ACCOUNT_TEMPLATE_HTML,  # nopep8
        context=dict(template_context),
    )

    # Attach parts into message container.
    # According to RFC 2046, the last part of a multipart message,
    # in this case the HTML message, is best and preferred.
    message.attach(MIMEText(body_text, 'plain', 'utf-8'))
    message.attach(MIMEText(body_html, 'html', 'utf-8'))

    asyncjob_perform(async_email_sender.send_mail, message)

    # Note: The following action allow to close the SMTP connection.
    # This will work only if the async jobs are done in the right order
    asyncjob_perform(async_email_sender.disconnect)
def _fetch(self) -> typing.List[MessageContainer]:
    """Poll the configured IMAP folder and return the new (unseen) mails.

    Each step (select, search, per-uid fetch) bails out with an error log
    when the server does not answer 'OK'.
    :return: list of new mails
    """
    messages = []

    # select mailbox
    logger.debug(self, 'Fetch messages from folder {}'.format(self.folder, ))
    select_status, _select_data = self._connection.select(self.folder)
    logger.debug(self, 'Response status {}'.format(select_status, ))
    if select_status != 'OK':
        log = 'IMAP : Unable to open mailbox : {}'
        logger.error(self, log.format(str(select_status)))
        return messages

    # get mails
    # TODO - G.M - 2017-11-15 Which files to select as new file ?
    # Unseen file or All file from a directory (old one should be
    # moved/ deleted from mailbox during this process) ?
    logger.debug(self, 'Fetch unseen messages')
    search_status, search_data = self._connection.search(None, "(UNSEEN)")
    logger.debug(self, 'Response status {}'.format(search_status, ))
    if search_status != 'OK':
        log = 'IMAP : Unable to get unseen mail : {}'
        logger.error(self, log.format(str(search_status)))
        return messages

    # get mail content
    unseen_uids = search_data[0].split()
    logger.debug(
        self,
        'Found {} unseen mails'.format(len(unseen_uids), ))
    for uid in unseen_uids:
        # INFO - G.M - 2017-12-08 - Fetch BODY.PEEK[]
        # Retrieve all mail(body and header) but don't set mail
        # as seen because of PEEK
        # see rfc3501
        logger.debug(self, 'Fetch mail "{}"'.format(uid, ))
        fetch_status, fetch_data = self._connection.fetch(uid, 'BODY.PEEK[]')
        logger.debug(self, 'Response status {}'.format(fetch_status, ))
        if fetch_status != 'OK':
            log = 'IMAP : Unable to get mail : {}'
            logger.error(self, log.format(str(fetch_status)))
            continue
        parsed = message_from_bytes(fetch_data[0][1])
        messages.append(MessageContainer(parsed, uid))
        # Mark as seen only once the mail was successfully retrieved.
        self._set_flag(uid, IMAP_SEEN_FLAG)

    return messages