def test_message_queue(self):
    """Queued messages are delivered in order on the next request."""
    message_user(self.user, "First Message")
    message_user(self.user, "Second Message")
    response = self.client.get('/')
    delivered = list(response.context['messages'])
    self.assertEqual(2, len(delivered))
    self.assertEqual('Second Message', str(delivered[1]))
def liberation_finish(result, options):
    """Store a finished liberation (personal data export) and notify the user.

    Reads the archive at ``result`` into the user's liberation status
    record, deletes the temporary file, and queues an in-app message with
    the download link.

    :param result: filesystem path of the generated archive (deleted here)
    :param options: dict with ``user`` (user id) and optionally
        ``compression_type`` (stored as ``content_type``, default ``'0'``)
    """
    user = get_user_model().objects.get(id=options['user'])
    lib_status = user.liberation
    lib_status.flags.running = False
    # BUGFIX: use a context manager so the file handle is closed even if
    # read() raises — the original `open(result, "r").read()` leaked it.
    with open(result, "r") as archive:
        lib_status.payload = archive.read()
    lib_status.last_finished = datetime.now(utc)
    lib_status.content_type = int(options.get('compression_type', '0'))
    lib_status.save()
    os.remove(result)
    message = _(
        "Your request for your personal data has been completed. Click "
        "<a class=\"alert-link\" href=\"%s\">here</a> to download your archive."
    )
    message_user(
        user,
        safestring.mark_safe(message % urlresolvers.reverse("user-liberate-get")))
    log.info("Finished liberation for %s", options['user'])
    # run a garbage collection on all workers - liberation is leaky
    tasks.force_garbage_collection.delay()
def test_message_queue(self):
    """Two queued messages arrive together, newest last."""
    for text in ("First Message", "Second Message"):
        message_user(self.user, text)
    response = self.client.get('/')
    received = list(response.context['messages'])
    self.assertEqual(2, len(received))
    self.assertEqual('Second Message', str(received[1]))
def debug(user, message):
    """Queue an asynchronous message for *user* at the ``DEBUG`` level.

    :param user: User instance
    :param message: Message to show
    """
    level = constants.DEBUG
    message_user(user, message, level)
def error(user, message):
    """Queue an asynchronous message for *user* at the ``ERROR`` level.

    :param user: User instance
    :param message: Message to show
    """
    level = constants.ERROR
    message_user(user, message, level)
def warning(user, message):
    """Queue an asynchronous message for *user* at the ``WARNING`` level.

    :param user: User instance
    :param message: Message to show
    """
    level = constants.WARNING
    message_user(user, message, level)
def success(user, message):
    """Queue an asynchronous message for *user* at the ``SUCCESS`` level.

    :param user: User instance
    :param message: Message to show
    """
    level = constants.SUCCESS
    message_user(user, message, level)
def info(user, message):
    """Queue an asynchronous message for *user* at the ``INFO`` level.

    :param user: User instance
    :param message: Message to show
    """
    level = constants.INFO
    message_user(user, message, level)
def task_api_update_products(userid):
    """Pull the product catalog from the API and notify the user of the outcome.

    :param userid: primary key of the User to notify
    """
    # BUGFIX: fetch the user *outside* the try block. In the original, if
    # User.objects.get() raised, the except handler referenced the unbound
    # name ``user`` and crashed with NameError, masking the real error.
    user = User.objects.get(pk=userid)
    try:
        pfCatalogProduct.api_pull()
        message_user(user, 'Success! Product Catalog has been updated.',
                     constants.SUCCESS)
    except Exception as e:
        message_user(user, 'API call failed. {}'.format(e), constants.ERROR)
def task_api_update_geos(userid):
    """Pull geographic data from the API and notify the user of the outcome.

    :param userid: primary key of the User to notify
    """
    # BUGFIX: fetch the user *outside* the try block. In the original, if
    # User.objects.get() raised, the except handler referenced the unbound
    # name ``user`` and crashed with NameError, masking the real error.
    user = User.objects.get(pk=userid)
    try:
        pfCountry.api_pull()
        message_user(user, "Success! Geographic data has been updated.",
                     constants.SUCCESS)
    except Exception as e:
        message_user(user, "API call failed. {}".format(e), constants.ERROR)
def test_anonymous_message(self):
    """message_user() must refuse anonymous users with AsyncMessageException."""
    anonymous = auth.get_user(Client())
    with self.assertRaises(AsyncMessageException) as ctx:
        message_user(anonymous, "Second Message")
    self.assertEqual('Anonymous users cannot send messages.',
                     str(ctx.exception))
def import_from_admin(indicators_file, model, user):
    """Validate the indicators CSV; if valid, queue the asynchronous import.

    Messages (in Spanish) are queued to *user* either way.
    """
    if invalid_indicators_csv(indicators_file, model):
        message_user(
            user,
            'El csv de indicadores es inválido. Correr el comando '
            'validate_indicators_csv para un reporte detallado',
            constants.WARNING)
        return
    message_user(
        user,
        'El csv de indicadores es válido, en breve iniciará'
        ' el proceso de importación',
        constants.SUCCESS)
    import_indicators_tempfile.delay(indicators_file, model, user)
def import_indicators_tempfile(indicators_file, model, user):
    """Run the indicators import, always delete the temp file, then notify.

    The temporary file is removed whether the import succeeded or not;
    the user receives a success or error message accordingly.
    """
    try:
        import_indicators(indicators_file, model)
    except Exception:
        outcome = ('Ocurrió un problema importando los indicadores',
                   constants.ERROR)
    else:
        outcome = ('Los indicadores fueron importados con éxito',
                   constants.SUCCESS)
    finally:
        os.remove(indicators_file)
    message_user(user, *outcome)
def post_run(self, report, **kwargs):
    """Persist tracking data for a finished report and notify its owner.

    Copies status, file name and parameters onto the tracking record,
    attaches the optional ``user`` kwarg, saves, and — if a user is set —
    queues a message with a link to the report.
    """
    super(DjangoReportHandler, self).post_run(report)
    tracking = self.report_tracking
    tracking.status = report.status
    tracking.report_file = self.report.filename
    tracking.data = self.report.parameters
    tracking.user = kwargs.get("user", None)
    tracking.save()
    if tracking.user:
        message_user(
            tracking.user,
            "Your report %s is ready <a href='%s'>here</a>"
            % (self.report.name, tracking.get_absolute_url()))
def check_feeds(self, force=False, commit=True): """ Check the twitter account timeline is a 1flow feed. """ # Avoid circular import from ..feed.twitter import TwitterFeed from ..subscription import subscribe_user_to_feed if self.timeline is None: # TODO: merge this with # self._create_feed_from_list_and_subscribe_to_it() # only the get_list() and the is_timeline differ. username = self.social_auth.extra_data.get( 'username', self.social_auth.uid) timeline = TwitterFeed( user=self.user, name=_(u'Twitter timeline for @{0}').format(username), is_timeline=True, is_backfilled=True, is_good=True, ) timeline.save() # Mandatory to check the timeline of this SOLE # account, and to be able to get it back afterwise. timeline.account.add(self) self.timeline = timeline self.save() LOGGER.info(u'%s: created timeline feed %s.', self, self.timeline) message_user( self.user, _(u'Subscribed you to @{0}\'s Twitter timeline.').format( username), constants.SUCCESS) timeline_subscription = subscribe_user_to_feed(user=self.user, feed=self.timeline) if not timeline_subscription.folders.exists(): timeline_subscription.folders.add(self.twitter_folder) self.check_lists(force=force, commit=commit)
def check_feeds(self, force=False, commit=True): """ Check the twitter account timeline is a 1flow feed. """ # Avoid circular import from ..feed.twitter import TwitterFeed from ..subscription import subscribe_user_to_feed if self.timeline is None: # TODO: merge this with # self._create_feed_from_list_and_subscribe_to_it() # only the get_list() and the is_timeline differ. username = self.social_auth.extra_data.get('username', self.social_auth.uid) timeline = TwitterFeed( user=self.user, name=_(u'Twitter timeline for @{0}').format(username), is_timeline=True, is_backfilled=True, is_good=True, ) timeline.save() # Mandatory to check the timeline of this SOLE # account, and to be able to get it back afterwise. timeline.account.add(self) self.timeline = timeline self.save() LOGGER.info(u'%s: created timeline feed %s.', self, self.timeline) message_user( self.user, _(u'Subscribed you to @{0}\'s Twitter timeline.').format( username), constants.SUCCESS) timeline_subscription = subscribe_user_to_feed(user=self.user, feed=self.timeline) if not timeline_subscription.folders.exists(): timeline_subscription.folders.add(self.twitter_folder) self.check_lists(force=force, commit=commit)
def run(self):
    """ Run the import, with one automatic delayed retry on failure.

    On the first failure the import is rescheduled 30–60 minutes later
    and marked RETRY; a failure while already retrying marks it FAILED
    and asks the user to relaunch manually.
    """
    #
    # NOTE: we don't care if the import was already running, finished,
    #       whatever. This class is able to recover and re-run itself
    #       over and over without doing bad thing in the database.
    #
    is_retrying = self.status == IMPORT_STATUS.RETRY
    self.status = IMPORT_STATUS.RUNNING
    self.date_started = now()
    self.save()
    try:
        return self.run_internal()
    # BUGFIX: was a bare `except:` (also swallowed SystemExit /
    # KeyboardInterrupt); narrowed to Exception.
    except Exception:
        # BUGFIX: the format string had a %s placeholder but no argument,
        # so the log call itself raised; supply the import id.
        LOGGER.exception(u'User import %s failed', self.id)
        if is_retrying:
            message_user(self.user,
                         _(u'Your import #{0} failed to run after a '
                           u'retry. Please review it before relaunching '
                           u'it manually again.').format(self.id),
                         constants.ERROR)
            self.status = IMPORT_STATUS.FAILED
        else:
            countdown = randrange(1800, 3600)
            delta_cd = naturaldelta(timedelta(seconds=countdown))
            # NOTE(review): "If will" looks like a typo for "It will", but
            # the string is a translation key; changing it would orphan
            # existing translations, so it is left as-is.
            message_user(self.user,
                         _(u'Your import #{0} failed to run. If will '
                           u'be automatically retried in {1}').format(
                             self.id, delta_cd),
                         constants.WARNING)
            globals()['userimport_run_task'].apply_async(
                (self.id, ), countdown=countdown)
            self.status = IMPORT_STATUS.RETRY
        self.save()
def liberation_finish(result, options):
    """Mark the user's liberation as finished and message a download link.

    :param result: unused in this variant (the payload is not stored here)
    :param options: dict with ``user`` (user id) and optionally
        ``compression_type`` (stored as ``content_type``, default ``'0'``)
    """
    user = get_user_model().objects.get(id=options['user'])
    liberation = user.liberation
    liberation.flags.running = False
    liberation.last_finished = datetime.now(utc)
    liberation.content_type = int(options.get('compression_type', '0'))
    liberation.save()
    download_url = urlresolvers.reverse("user-liberate-get")
    message = _("Your request for your personal data has been completed. Click <a class=\"alert-link\" href=\"%s\">here</a> to download your archive.")
    message_user(user, safestring.mark_safe(message % download_url))
    log.info("Finished liberation for %s", options['user'])
    # run a garbage collection on all workers - liberation is leaky
    tasks.force_garbage_collection.delay()
def mine(self, userid):
    """ This filters by the user's group.

    Restricts the parent manager's queryset to rows whose configured
    group field matches one of the user's groups. The "no groups"
    warning is emitted at most once per manager instance (cached via
    the private flag below).
    """
    qs = super(DorsaleGroupSiteManager, self).mine(userid)  # : queryset
    if not self.user:
        # No user bound to this manager: nothing to restrict.
        return qs
    # user = User.objects.get(pk=userid)
    # check the group field; superusers see everything, and an empty
    # queryset or unset group field needs no filtering.
    if self.user.is_active \
            and not self.user.is_superuser \
            and qs.count() > 0 \
            and self.__group_field_name:
        if not self.__group_is_checked:
            # We don't check if group_field exists to allow chains like 'product__group'
            group_count = self.user.groups.count()
            if group_count == 0:
                logger.error(_("User %s doesn’t belong to any group!") % self.user.username)
                if ASYNC_MESSAGES:
                    message_user(self.user, _("You do not yet belong to any groups. Ask your administrator to add you to one."), constants.ERROR)
            # Remember we warned, so repeated calls stay quiet.
            self.__group_is_checked = True
        # filter on the user's groups
        qs = qs.filter(**{self.__group_field_name + '__in': self.user.groups.all()})
    return qs
except Exception, e: self.mark_unusable(u'Get Twitter account/settings failed', exc=e) else: dict_result = result.json() # {'limit': None, 'remaining': 14, 'reset': None} quota = result.get_rest_quota() self.mark_usable() if settings.DEBUG and 'screen_name' in dict_result: message_user( self.user, _(u'Twitter account successfully tested (quota ' u'remaining: {0}).').format(quota['remaining']), constants.INFO) def update_lists(self): """ Simply update the remote lists names. .. note:: this method is registered as a task in Celery. """ if not self.recently_usable: return self._lists_ = twitteraccount_lists_default(self) LOGGER.debug(u'%s: Twitter lists updated; owned: %s, subscribed: %s.',
def message_admins(*args, **kwargs):
    """ Asynchronously send messages to all administrators.

    :param preference: if set, the corresponding preference of each
        admin user is checked to see if he/she wants to receive the
        message or not.
    :type preference: str, unicode or None (default)

    :param timestamping: if ``True``, a timestamp is prepended to the
        message. The timestamp will be translated in the user langage
        and timezone.
    :type timestamping: bool (default: ``True``)
    """
    timestamping = kwargs.pop('timestamping', True)
    preference = kwargs.pop('preference', None)

    admins = User.objects.filter(is_active=True).filter(
        Q(is_superuser=True) | Q(is_staff=True)).filter(
        preferences__staff__super_powers_enabled=True)

    if preference is not None:
        # BUGFIX (x2): QuerySet.filter() returns a NEW queryset — the
        # original discarded the result, so the preference filter never
        # applied. Also the lookup prefix was 'preference__staff__',
        # inconsistent with the 'preferences__staff__' path used above.
        query_kwargs = {'preferences__staff__{}'.format(preference): True}
        admins = admins.filter(**query_kwargs)

    if timestamping:
        args = list(args)
        # This is an UTC datetime.
        dtnow = now()
        initial_lang = translation.get_language()
        prev_lang = initial_lang

        for admin in admins:
            # Copy args so each admin gets an untouched message template.
            uargs = args[:]
            # default datetime is UTC.
            udtnow = dtnow
            try:
                if admin.account.timezone:
                    udtnow = dtnow.astimezone(pytz.timezone(
                        admin.account.timezone))
                if admin.account.language:
                    if prev_lang != admin.account.language:
                        translation.activate(admin.account.language)
                        prev_lang = admin.account.language
                uargs[0] = _(u'{}: {}').format(
                    formats.date_format(udtnow, 'SHORT_DATETIME_FORMAT'),
                    uargs[0])
            except ObjectDoesNotExist:
                # Mostly "user has no account"
                LOGGER.warning('Admin %s has no account ?', admin)
            message_user(admin, *uargs, **kwargs)

        # Restore the language active before we started iterating.
        if initial_lang is not None:
            translation.activate(initial_lang)
    else:
        message_users(admins, *args, **kwargs)
def check_lists(self, force=False, commit=True):
    """ Check Twitter account's lists are local 1flow feeds.

    Local twitter feeds will be created if constance config or user
    preferences say so, and existing local feeds will be deleted if
    present but the local user doesn't want them anymore.
    """
    # https://dev.twitter.com/rest/reference/get/lists/subscriptions
    #
    # TODO: the deletion process is overkill: it will create
    #       subscriptions to delete them afterwards. We should
    #       optimize. But BTW, creating [closed] feeds for all
    #       lists seems a good thing, at first sight.
    #
    lists = self.lists
    if lists is None:
        # Lists are refreshed by a separate task; without them there is
        # nothing to reconcile, so bail out early.
        LOGGER.warning(u'Lists are outdated or inexistant. '
                       u'Please refresh them first (the task '
                       u'should be already underway).')
        return
    created_lists = []
    deleted_lists = []
    username = self.social_auth.extra_data.get('username',
                                               self.social_auth.uid)
    if self.fetch_owned_lists:
        for list_name in lists.owned:
            feed, subscription, created = \
                self._create_feed_from_list_and_subscribe_to_it(
                    username, list_name)
            if created:
                created_lists.append(list_name)
    else:
        # User opted out of owned lists: close the feed and drop the
        # subscription (see the create-then-delete TODO above).
        for list_name in lists.owned:
            feed, subscription, created = \
                self._create_feed_from_list_and_subscribe_to_it(
                    username, list_name)
            feed.close(u'Owner does not fetch owned lists anymore')
            subscription.delete()
            deleted_lists.append(list_name)
    if self.fetch_subscribed_lists:
        for list_name in lists.subscribed:
            feed, subscription, created = \
                self._create_feed_from_list_and_subscribe_to_it(
                    username, list_name)
            if created:
                created_lists.append(list_name)
    else:
        # Same opt-out handling for subscribed lists.
        for list_name in lists.subscribed:
            feed, subscription, created = \
                self._create_feed_from_list_and_subscribe_to_it(
                    username, list_name)
            feed.close(u'Owner does not fetch subscribed lists anymore')
            subscription.delete()
            deleted_lists.append(list_name)
    tlsfu = twitter_list_slug_from_uri
    if created_lists:
        message_user(
            self.user,
            _(u'Subscribed you to {0} Twitter list(s): {1}.').format(
                len(created_lists),
                u', '.join(tlsfu(l) for l in created_lists)),
            constants.SUCCESS)
    if deleted_lists:
        # Given the way checks work, don't bother the user with messages
        # he/she didn't asked for. Eg. if lists fetching is disabled, no
        # need to tell him/her he/she is unsubscribed, because he/she
        # should already be.
        if config.TWITTER_ACCOUNT_FETCH_OWNED_LISTS \
                or config.TWITTER_ACCOUNT_FETCH_SUBSCRIBED_LISTS:
            message_user(
                self.user,
                _(u'Unsubscribed from {0} Twitter list(s): {1}.').format(
                    len(deleted_lists),
                    u', '.join(tlsfu(l) for l in deleted_lists)),
                constants.INFO)
def get(self, request):
    """Queue the async geo-data refresh, notify the user, redirect to the list."""
    requesting_user = request.user
    task_api_update_geos.delay(requesting_user.pk)
    message_user(requesting_user, 'Starting update of geographic data.',
                 constants.INFO)
    return redirect('business:app_list_geo_list')
def import_from_one_url(self, url, origin=None): """ Guess if an URL is a feed or an article and import it. """ # —————————————————————————————————————— Try to create an RSS/Atom Feed feeds = None try: feeds = create_feeds_from_url(url) except FeedIsHtmlPageException: # This is expected if we are importing web pages URLs. pass except Exception as e: LOGGER.exception(u'Exception occured while trying to create ' u'feed(s) from %s', url) if feeds: imported_item_was_a_feed_url = False for feed, created in feeds: if feed.url == url: imported_item_was_a_feed_url = True break if imported_item_was_a_feed_url: self._import_created_['feeds'].append(url) # Subscribe the user to the feed, and don't # try to import an article from the URL. for feed, created in feeds: subscribe_user_to_feed(user=self.user, feed=feed, background=True) if created: message_user(self.user, _(u'Successfully subscribed to new feed ' u'“{0}”. Thank you!').format(feed.name), constants.INFO) else: message_user(self.user, _(u'Successfully subscribed to feed ' u'“{0}”.').format(feed.name), constants.INFO) return None else: self._import_created_['discovered'] = [ feed.url for feed, created in feeds if created ] # ———————————————————————————————————————————— Try to create an article article = None try: article, created = create_item_from_url( url, feeds=[self.user.user_feeds.imported_items], origin=origin ) # create_item_from_url() will run subscription.create_read() # for all feeds, thus the read is assumed to be ready now. read = Read.objects.get(item=article, user=self.user) read.mark_archived() except Exception as e: LOGGER.exception(u'Could not create article from URL %s', url) self._import_failed_.append((url, unicode(e))) else: self._import_created_['articles'].append(url) message_user(self.user, _(u'Successfully imported article ' u'“{0}”.').format(article.name), constants.INFO) return article
def guess_and_import_wallabag(self):
    """ Try to import a JSON export file from wallabag.

    Returns False as soon as self.urls does not look like a wallabag
    JSON export (not JSON, empty, or missing expected keys); returns
    True after attempting to import every entry.
    """
    try:
        wallabag_json = json.loads(self.urls)
    except:
        # Not JSON at all — not a wallabag export.
        return False
    try:
        first_object = wallabag_json[0]
    except:
        # Empty or not a sequence.
        return False
    # Fingerprint check: a wallabag record carries all of these keys.
    for attr_name in (
        "0", "1", "2", "3", "4", "5", "6",
        "content", "id", "is_fav", "is_read",
        "title", "url", "user_id",
    ):
        if attr_name not in first_object:
            return False
    message_user(self.user,
                 _(u'Wallabag JSON export format detected.'),
                 constants.INFO)
    for wallabag_object in wallabag_json:
        url = wallabag_object['url']
        if self.validate_url(url):
            article = self.import_from_one_url(
                url,
                origin=ORIGINS.WALLABAG
            )
            if article is None:
                # article was not created, we
                # already have it in the database.
                article = Article.objects.get(url=url)
            # Now comes the wallabag-specific part of the import,
            # eg. get back user meta-data as much as possible in 1flow.
            article_needs_save = False
            article_needs_convert = False
            title = wallabag_object.get('title', None)
            if title:
                article.name = title
                article_needs_save = True
            content = wallabag_object['content']
            if content:
                article.content = content
                article.content_type = CONTENT_TYPES.HTML
                article_needs_save = True
                article_needs_convert = True
            if article_needs_save:
                article.save()
            if article_needs_convert:
                article.convert_to_markdown()
            read = article.reads.get(
                subscriptions=self.user.user_subscriptions.imported_items)
            # About parsing dates:
            # http://stackoverflow.com/q/127803/654755
            # http://stackoverflow.com/a/18150817/654755
            read_needs_save = False
            if wallabag_object.get('is_fav', False):
                read.is_starred = True
                read_needs_save = True
                # This information is not in wallabag.
                read.date_starred = now()
            if wallabag_object.get('is_read', False):
                read.is_read = True
                read_needs_save = True
                # This information is not in wallabag.
                read.date_read = now()
            if read_needs_save:
                read.save()
    return True
def guess_and_import_readability(self):
    """ Guess if our content is a readability file, then import it.

    Returns False as soon as self.urls does not look like a Readability
    JSON export (not JSON, empty, or missing expected keys); returns
    True after attempting to import every entry.
    """
    try:
        readability_json = json.loads(self.urls)
    except:
        # Not JSON at all — not a readability export.
        return False
    try:
        first_object = readability_json[0]
    except:
        # Empty or not a sequence.
        return False
    # Fingerprint check: a readability record carries all of these keys.
    for attr_name in ("article__title", "article__excerpt", "article__url",
                      "date_added", "favorite", "date_favorited",
                      "archive", "date_archived"):
        if attr_name not in first_object:
            return False
    message_user(self.user,
                 _(u'Readability JSON export format detected.'),
                 constants.INFO)
    for readability_object in readability_json:
        url = readability_object['article__url']
        if self.validate_url(url):
            article = self.import_from_one_url(
                url,
                origin=ORIGINS.READABILITY
            )
            if article is None:
                # article was not created, we
                # already have it in the database.
                article = Article.objects.get(url=url)
            #
            # Now comes the readability-specific part of the import,
            # eg. get back user meta-data as much as possible in 1flow.
            #
            article_needs_save = False
            if readability_object['article__title']:
                article.name = readability_object['article__title']
                article_needs_save = True
            if readability_object['article__excerpt']:
                article.excerpt = readability_object['article__excerpt']
                article_needs_save = True
            if article_needs_save:
                article.save()
            read = article.reads.get(
                subscriptions=self.user.user_subscriptions.imported_items)
            # About parsing dates:
            # http://stackoverflow.com/q/127803/654755
            # http://stackoverflow.com/a/18150817/654755
            read_needs_save = False
            date_added = readability_object['date_added']
            if date_added:
                # We try to keep the history of date when the
                # user added this article to readability.
                try:
                    read.date_created = date_parser.parse(date_added)
                except:
                    LOGGER.exception(u'Parsing creation date "%s" for '
                                     u'read #%s failed.', date_added,
                                     read.id)
                else:
                    read_needs_save = True
            if readability_object['favorite']:
                read.is_starred = True
                read_needs_save = True
                date_favorited = readability_object['date_favorited']
                if date_favorited:
                    try:
                        read.date_starred = date_parser.parse(
                            date_favorited)
                    except:
                        LOGGER.exception(u'Parsing favorited date "%s" '
                                         u'for read #%s failed.',
                                         date_favorited, read.id)
            if read_needs_save:
                read.save()
    return True
def test_message_appears_for_user(self):
    """A single queued message shows up in the next response's context."""
    message_user(self.user, "Hello")
    response = self.client.get('/')
    delivered = list(response.context['messages'])
    self.assertEqual(1, len(delivered))
    self.assertEqual('Hello', str(delivered[0]))
def check_lists(self, force=False, commit=True):
    """ Check Twitter account's lists are local 1flow feeds.

    Local twitter feeds will be created if constance config or user
    preferences say so, and existing local feeds will be deleted if
    present but the local user doesn't want them anymore.
    """
    # https://dev.twitter.com/rest/reference/get/lists/subscriptions
    #
    # TODO: the deletion process is overkill: it will create
    #       subscriptions to delete them afterwards. We should
    #       optimize. But BTW, creating [closed] feeds for all
    #       lists seems a good thing, at first sight.
    #
    lists = self.lists
    if lists is None:
        # Lists are refreshed by a separate task; without them there is
        # nothing to reconcile, so bail out early.
        LOGGER.warning(u'Lists are outdated or inexistant. '
                       u'Please refresh them first (the task '
                       u'should be already underway).')
        return
    created_lists = []
    deleted_lists = []
    username = self.social_auth.extra_data.get(
        'username', self.social_auth.uid)
    if self.fetch_owned_lists:
        for list_name in lists.owned:
            feed, subscription, created = \
                self._create_feed_from_list_and_subscribe_to_it(
                    username, list_name)
            if created:
                created_lists.append(list_name)
    else:
        # User opted out of owned lists: close the feed and drop the
        # subscription (see the create-then-delete TODO above).
        for list_name in lists.owned:
            feed, subscription, created = \
                self._create_feed_from_list_and_subscribe_to_it(
                    username, list_name)
            feed.close(u'Owner does not fetch owned lists anymore')
            subscription.delete()
            deleted_lists.append(list_name)
    if self.fetch_subscribed_lists:
        for list_name in lists.subscribed:
            feed, subscription, created = \
                self._create_feed_from_list_and_subscribe_to_it(
                    username, list_name)
            if created:
                created_lists.append(list_name)
    else:
        # Same opt-out handling for subscribed lists.
        for list_name in lists.subscribed:
            feed, subscription, created = \
                self._create_feed_from_list_and_subscribe_to_it(
                    username, list_name)
            feed.close(u'Owner does not fetch subscribed lists anymore')
            subscription.delete()
            deleted_lists.append(list_name)
    tlsfu = twitter_list_slug_from_uri
    if created_lists:
        message_user(
            self.user,
            _(u'Subscribed you to {0} Twitter list(s): {1}.').format(
                len(created_lists),
                u', '.join(
                    tlsfu(l) for l in created_lists)),
            constants.SUCCESS)
    if deleted_lists:
        # Given the way checks work, don't bother the user with messages
        # he/she didn't asked for. Eg. if lists fetching is disabled, no
        # need to tell him/her he/she is unsubscribed, because he/she
        # should already be.
        if config.TWITTER_ACCOUNT_FETCH_OWNED_LISTS \
                or config.TWITTER_ACCOUNT_FETCH_SUBSCRIBED_LISTS:
            message_user(
                self.user,
                _(u'Unsubscribed from {0} Twitter list(s): {1}.').format(
                    len(deleted_lists),
                    u', '.join(
                        tlsfu(l) for l in deleted_lists)),
                constants.INFO)
def test_message_appears_for_user(self):
    """One queued message is delivered via the messages context variable."""
    message_user(self.user, "Hello")
    context_messages = list(self.client.get('/').context['messages'])
    self.assertEqual(1, len(context_messages))
    self.assertEqual('Hello', str(context_messages[0]))
def get(self, request):
    """Queue the async product-catalog update, notify, redirect to the list."""
    requesting_user = request.user
    task_api_update_products.delay(requesting_user.pk)
    message_user(requesting_user, 'Starting product catalog update.',
                 constants.INFO)
    return redirect('business:app_list_cprod_list')