Example #1
def check_for_sending(request):
    """ TODO: UT needed."""
    one_week_ago = now()  - datetime.timedelta(days=7)
    news_list = HackerNews.objects.filter(added__gt=one_week_ago, sent=False)
    count_file = 0
    count_email = 0
    for news in news_list:
        sr_list = SendRecord.objects.filter(news=news, sent=False)[:EMAIL_COUNT_LIMIT]
        if len(sr_list) == 0:
            news.sent = True
            news.save()
            continue

        receivers = [x.email for x in sr_list]
        try:
            send_files_to([news.file_path], receivers, subject=news.title)
            count_file += 1
            count_email += len(receivers)
        except Exception as e:
            info = ("send mail failed. Exception: %s File: %s" %
                    (e, news.file_path))
            logger.error(info)
        else:
            for sr in sr_list:
                sr.sent = True
                sr.save()
            time.sleep(0.3)
    return HttpResponse("%s files sent to %s addresses.\n" % (count_file, count_email))
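A minimal sketch of how a maintenance view like this could be wired up and triggered periodically; the hackernews.views module path, URL pattern, and cron schedule below are assumptions, not part of the original project:

# urls.py -- hypothetical wiring (Django 1.x style URLconf)
from django.conf.urls import url

from hackernews import views  # assumed location of check_for_sending

urlpatterns = [
    url(r'^hackernews/check_for_sending/$', views.check_for_sending),
]

# Hypothetical crontab entry hitting the view every ten minutes:
# */10 * * * * curl -s http://localhost:8000/hackernews/check_for_sending/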
Example #2
def weekly_sending(request):
    info = ""
    date_now = now()
    week_number = date_now.isocalendar()[1] - 1
    try:
        weekly = Weekly.objects.get(week_number=week_number)
    except Weekly.DoesNotExist:
        info = "This Weekly Does not exist"
        return HttpResponse(info + "\n")

    if not weekly.file_path:
        info = "No file for this Weekly"
        return HttpResponse(info + "\n")

    receivers = WeeklySendRecord.objects.filter(weekly=weekly, sent=False)[:EMAIL_COUNT_LIMIT]
    emails = [x.email for x in receivers]
    if len(emails) == 0:
        info = "Weekly sent complete."
        return HttpResponse(info + "\n")

    try:
        subject = "Hacker News Weekly %s" % week_number
        send_files_to([weekly.file_path], emails, subject=subject)
        for item in receivers:
            item.sent = True
            item.save()
    except Exception as e:
        info = "send weekly mail failed. Exception: %s Emails: %s" % (e, emails)
        logger.error(info)
    else:
        info = "Weekly sent to %s receivers." % len(emails)
    return HttpResponse(info + "\n")
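The queries above only assume three fields on each send record. A sketch of the WeeklySendRecord model they imply; the field types and the related Weekly reference are assumptions, only the names weekly, email and sent come from the code:

# models.py -- sketch inferred from the lookups above (Django 1.x style)
from django.db import models

class WeeklySendRecord(models.Model):
    weekly = models.ForeignKey('Weekly')       # matched by filter(weekly=weekly)
    email = models.EmailField()                # collected into the receiver list
    sent = models.BooleanField(default=False)  # flipped to True after a successful send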
Example #3
def check(request):
    api = get_twitter_private_api()
    try:
        messages = api.GetHomeTimeline(count=100)
    except Exception:
        logger.error("Error when GetHomeTimeline when checking Notes")
        return HttpResponse("Error.")
    info = []
    for msg in messages:
        if "#Kindle" not in msg.text or 'http' not in msg.text:
            continue
        url = re.search(r'(http[^ ]+)', msg.text).group(1)
        if Note.objects.filter(url=url).exists() or Word.objects.filter(url=url).exists():
            continue
        user = get_user_from_twitter_id(msg.user.screen_name)
        if not user:
            continue
        added = datetime.datetime(*rfc822.parsedate(msg.created_at)[:6])
        info.append((user, url, added, msg.id))
    count = 0
    for user, url, added, tweet_id in info:
        save_note(user, url, added, tweet_id)
        count += 1
    return HttpResponse("%s\n" % count)
Example #4
    def update_news(self, article_list):
        """
        Update HackerNews records
        """
        count_created = count_updated = count_filed = 0
        for article in article_list:
            if self.filter(url=article['url']).exists():
                news = self.get(url=article['url'])

                # Update the points of the article, send the points update signal
                if news.filed and news.file_path and article['points'] > news.points:
                    pre_points = news.points
                    news.points = article['points']
                    news.save()
                    signals.points_updated.send(sender=news, pre_points=pre_points)
                    count_updated += 1

                # If the article was aborted before, ignore it.
                # Check the error log for the reason.
                if news.aborted:
                    continue

            else:
                # Create a new one if it does not exist
                news = self.create(url=article['url'],
                                   points=article['points'],
                                   title=smart_str(article['title']))
                count_created += 1

            # Save articles whose points are high enough to a file
            if (not news.filed) and article['points'] >= POINTS_LIMIT_TO_SAVE:
                try:
                    year, week_number, _ = datetime.date.today().isocalendar()
                    dir_hackernews = settings.HACKER_NEWS_DIR
                    if not os.path.exists(dir_hackernews):
                        os.mkdir(dir_hackernews)
                    dir_year = os.path.join(dir_hackernews, str(year))
                    if not os.path.exists(dir_year):
                        os.mkdir(dir_year)
                    dir_week = os.path.join(dir_year, "%02d" % week_number)
                    if not os.path.exists(dir_week):
                        os.mkdir(dir_week)
                    bf = BriticleFile(news.url, dir_week)
                except Exception as e:
                    if isinstance(e, URLError) or 'timed out' in str(e):
                        logger.error("URLError or Time out Exception: %s URL: %s" % (e, news.url))
                        news.aborted = True
                        news.save()
                        continue
                    elif isinstance(e, UnicodeEncodeError):
                        logger.error("UnicodeEncodeError: %s URL: %s" % (e, news.url))
                        news.aborted = True
                        news.save()
                        continue
                    raise

                # Abort if there is no content
                if bf.is_empty():
                    logger.info("No content found for: %s" % news.url)
                    news.aborted = True
                    news.save()
                    continue

                try:
                    mobi = bf.save_to_mobi(title=news.title, sent_by="Kindle.io")
                except Exception as e:
                    logger.error("Failed while calling bf.save_to_mobi(). %s: %s URL: %s" %
                                 (e.__class__, e, news.url))
                    news.aborted = True
                    news.save()
                    continue

                if mobi:
                    news.filed = True
                    news.file_path = mobi
                    news.html = bf.html
                    news.save()
                    signals.file_saved.send(sender=news)
                    count_filed += 1
                else:
                    logger.error("Failed to save file. URL: %s" % news.url)
                    news.aborted = True
                    news.save()
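The nested os.mkdir() calls above build one directory per ISO week, <HACKER_NEWS_DIR>/<year>/<week>. The same layout can be created in a single step with os.makedirs(); a sketch, with base_dir standing in for settings.HACKER_NEWS_DIR:

import datetime
import os

def weekly_dir(base_dir):
    """Return the <base_dir>/<year>/<week> directory for today, creating it if needed."""
    year, week_number, _ = datetime.date.today().isocalendar()
    path = os.path.join(base_dir, str(year), "%02d" % week_number)
    if not os.path.exists(path):
        os.makedirs(path)  # creates missing intermediate directories as well
    return path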