Пример #1
0
def gen_news(title, category, site, url, chksum, pub_date):
    """Build and persist a NewsItem from the supplied field values."""
    item = NewsItem()
    item.title = title
    item.category = category
    item.site = site
    item.url = url
    item.chksum = chksum
    item.pub_date = pub_date
    item.save()
Пример #2
0
    def _convert_to_news_item(self, e):
        """Convert a raw feed entry dict ``e`` into a saved NewsItem.

        Returns the already-stored NewsItem when one with the same title
        and publication date exists; otherwise builds, saves and returns
        a new one, downloading the main image when the entry has one.
        """
        title = e['title']
        ingress = e['introduction']
        subject = 'miehet-edustus' if e['team'] == 'Miehet Edustus' else 'heimo'
        if subject == 'miehet-edustus':
            body = e['content'] + \
                   f"<p class='author'>// {e['text_by']}</p>"
        else:
            if e['team']:
                body = f"<p class='team'>{e['team']}</p>" + \
                       e['content'] + \
                       f"<p class='author'>// {e['text_by']}</p>"
            else:
                body = e['content'] + \
                       f"<p class='author'>// {e['text_by']}</p>"

        # Timestamps carry either a +03:00 (summer) or +02:00 (winter)
        # fixed offset; try both formats, with a sentinel date for nulls.
        try:
            publication_date = datetime.datetime.strptime(
                e['published_at'], '%Y-%m-%dT%H:%M:%S+03:00')
        except ValueError:
            publication_date = datetime.datetime.strptime(
                e['published_at'], '%Y-%m-%dT%H:%M:%S+02:00')
        except TypeError:
            publication_date = datetime.datetime(2012, 1, 1, 0, 0,
                                                 0)  # there are null values..

        publication_date = pytz.timezone('Europe/Helsinki').localize(
            publication_date)

        # Single .first() query instead of the racy exists()/get() pair,
        # which hit the database twice for the same lookup.
        existing = NewsItem.objects.filter(
            title=title, publication_date=publication_date).first()
        if existing is not None:
            print(f'Already had {title}')
            return existing

        news_item = NewsItem(
            title=title,
            ingress=ingress,
            subject=subject,
            body=body,
            publication_date=publication_date,
        )

        if e['mainimage']:
            image = requests.get(e['mainimage'])
            fp = BytesIO()
            fp.write(image.content)
            name = urlparse(e['mainimage']).path.split('/')[-1]
            news_item.image.save(name, File(fp))
        news_item.save()
        print(f'Imported #{news_item.id} {news_item.title}')
        return news_item
Пример #3
0
    def test_method_sync_staging__with_news(self):
        """sync() must upload news pages and their images to staging."""
        # Attach a news page to the static site under test.
        newspage = NewsPage(
            site=self.staticsite,
            index=self.indexpage,
            created_by=self.system_admin_user,
            updated_by=self.system_admin_user,
        )
        newspage.save()
        yesterday = timezone.now() - timezone.timedelta(days=1)

        # Point the index template at the news-items variable.
        self.indexpage.newsitems_template_variablename = 'news_items'
        self.indexpage.save()

        # Populate the page with already-published news items.
        published_count = 15
        for _ in range(published_count):
            NewsItem(
                newspage=newspage,
                publish_on=yesterday,
                is_published=True,
                image=self._get_dummy_image_file(
                    sample_image_filename=self.news_image_filename),
                image_relpath=self.news_image_relpath,
                created_by=self.system_admin_user,
                updated_by=self.system_admin_user,
            ).save()

        transferred = self.staticsite.sync(update_production=False)
        self.assertTrue(transferred)

        expected_keys = (
            'stylesheets/plugins/bootstrap3.min.css',
            'stylesheets/plugins/drawer.min.css',
            'stylesheets/style.css',
            'index.html',
            'imgs/news/sample-photo.jpeg',
            'news/news_0.html',
            'news/news_1.html',
            'news/news_2.html',
        )
        bucket_contents = S3_CLIENT.list_objects(
            Bucket=self.staging_bucket_name)['Contents']
        uploaded = {obj['Key'] for obj in bucket_contents}
        missing = set(expected_keys) - uploaded
        self.assertFalse(missing, f'missing Keys: {missing}')
Пример #4
0
def news(request, slug=None):
    """Render the paginated news listing, or one item when ``slug`` is given.

    Malformed ``page`` query values fall back to the first page;
    out-of-range values are clamped to the last page.
    """
    c = get_common_context(request)
    reset_catalog(request)
    if slug is None:  # PEP 8: compare to None with `is`, not `==`
        items = NewsItem.objects.all()
        paginator = Paginator(items, NEWS_PAGINATION_COUNT)
        page = int(request.GET.get('page', '1'))
        try:
            items = paginator.page(page)
        except PageNotAnInteger:
            # Malformed page number: fall back to the first page.
            page = 1
            items = paginator.page(page)
        except EmptyPage:
            # Out of range: clamp to the last page.
            page = paginator.num_pages
            items = paginator.page(page)
        c['page'] = page
        c['page_range'] = paginator.page_range
        if len(c['page_range']) > 1:
            c['need_pagination'] = True

        c['news'] = items
        return render_to_response('news.html', c,
                                  context_instance=RequestContext(request))
    else:
        c['item'] = NewsItem.get_by_slug(slug)
        return render_to_response('news_item.html', c,
                                  context_instance=RequestContext(request))
Пример #5
0
def news(request, slug=None):
    """Render the paginated news listing, or one item when ``slug`` is given.

    Malformed ``page`` query values fall back to the first page;
    out-of-range values are clamped to the last page.
    """
    c = get_common_context(request)
    if slug is None:  # PEP 8: compare to None with `is`, not `==`
        items = NewsItem.objects.all()
        paginator = Paginator(items, NEWS_PAGINATION_COUNT)
        page = int(request.GET.get('page', '1'))
        try:
            items = paginator.page(page)
        except PageNotAnInteger:
            # Malformed page number: fall back to the first page.
            page = 1
            items = paginator.page(page)
        except EmptyPage:
            # Out of range: clamp to the last page.
            page = paginator.num_pages
            items = paginator.page(page)
        c['page'] = page
        c['page_range'] = paginator.page_range
        if len(c['page_range']) > 1:
            c['need_pagination'] = True

        c['news'] = items
        return render_to_response('news.html',
                                  c,
                                  context_instance=RequestContext(request))
    else:
        c['new'] = NewsItem.get_by_slug(slug)
        return render_to_response('new.html',
                                  c,
                                  context_instance=RequestContext(request))
Пример #6
0
    def process_item(self, item, spider):
        """Persist a scraped news item, reusing any matching existing row.

        An existing generated NewsItem is matched either by
        (title, datetime) or by source; its content is refreshed,
        otherwise a new row is created. Returns the item for the next
        pipeline stage, or None when the item is inconsistent
        (an image filename without image data).
        """
        image = item.get('image_data', None)
        image_filename = item.get('image_filename', None)

        current = NewsItem.objects.filter(
            Q(title=item['title'], datetime=item['datetime']) | Q(source=item['source']), generated=True).first()
        if current is not None:
            # Use the logging module (consistent with logging.error below)
            # instead of a bare print; lazy %-args avoid f-string work
            # when the level is disabled.
            logging.info(
                "%s - %s\n\t %s %s\n\tCurrent: %s %s",
                item['title'], item['datetime'], item['source'],
                item.get('image_filename', None), current, current.cover_img)
        if current is None:
            current = NewsItem(
                title=item['title'],
                datetime=item['datetime'],
                source=item['source'],
                content=item['content'],
                generated=True,
            )
        else:
            current.content = item['content']

        if image_filename is not None:
            if image is None:
                logging.error("Inconsistency. Image name but no image.")
                return
            current.cover_img.save(image_filename, File(io.BytesIO(image)))

        current.gen_summary()
        current.save()
        return item
Пример #7
0
 def done(self, request, cleaned_data):
     """Save a user-submitted news item, then redirect to the confirmation page."""
     item = NewsItem()
     item.title = cleaned_data['title']
     item.topic = cleaned_data['topic']
     item.body = cleaned_data['body']
     item.submitter = request.user
     item.save()
     return HttpResponseRedirect('/news/submitted')
Пример #8
0
def persist(queue):
    """Consumer loop: drain *queue* and store each fetched item.

    Duplicate rows (IntegrityError on commit) are rolled back and
    skipped. Runs forever; intended as a worker-thread target.
    """
    while True:
        payload = queue.get()
        row = NewsItem(**payload)
        session.add(row)

        try:
            session.commit()
        except IntegrityError:
            session.rollback()
        else:
            logger.info('Added %s', row.title)

        queue.task_done()
Пример #9
0
 def handle(self, *args, **options):
     print "Getting reddit update"
     r = reddit.Reddit('willcritchlow anythinginterestingbot')
     submissions = r.get_subreddit('worldnews').get_top('week')
     for submission in submissions:
         try:
             ni = NewsItem.objects.get(slug=submission.id)
         except:
             ni = NewsItem()
             ni.slug = submission.id
             ni.title = submission.title
             ni.url = submission.short_link # use the short link to guarantee it will fit in the database
             ni.comment_url = submission.permalink
             ni.created = datetime.fromtimestamp(submission.created_utc)
         ni.score = submission.score
         try:
             ni.save()
         except:
             print "Error saving %s" % ni.title
     print "Reddit update complete"
Пример #10
0
    def handle(self, *args, **options):
        """Seed the database with randomly generated NewsItem fixtures.

        Creates one item per available image (or NUM_ITEMS image-less
        items when the image factory is empty). A failed save returns
        the image to the pool so another word can be tried.
        """
        words = WORDS[:]
        images = build_image_factory()

        if not images:
            # No images available: still create NUM_ITEMS image-less items.
            images = [None] * NUM_ITEMS

        while images:
            item = NewsItem()
            # random.choice is the idiomatic form of
            # seq[random.randint(0, len(seq) - 1)].
            random_name = random.choice(words)
            item.title = text_type(random_name).capitalize()
            item.slug = "{0}-{1}".format(slugify(item.title), uuid.uuid4())
            item.image = images.pop()
            item.body = text_type(random.choice(SENTENCES))
            item.date_published = radar.random_datetime()

            try:
                item.save()
                # Consume the word so titles are not reused.
                words.remove(random_name)

            except Exception as e:
                # Return the image to the pool and retry next iteration.
                images.append(item.image)
                logger.debug(e)
Пример #11
0
    def handle(self, *args, **options):
        """Populate NewsItem rows with random words, sentences and images."""
        word_pool = WORDS[:]
        images = build_image_factory()

        if not len(images):
            # No images: still produce NUM_ITEMS image-less entries.
            images = [None for _ in range(NUM_ITEMS)]

        while len(images):
            news = NewsItem()
            picked = word_pool[random.randint(0, len(word_pool) - 1)]
            news.title = text_type(picked).capitalize()
            news.slug = "{0}-{1}".format(slugify(news.title), uuid.uuid4())
            news.image = images.pop()
            news.body = text_type(
                SENTENCES[random.randint(0, len(SENTENCES) - 1)])
            news.date_published = radar.random_datetime()

            try:
                news.save()
                # A saved title removes its word from the pool.
                word_pool.remove(picked)

            except Exception as exc:
                # Put the image back so it gets another attempt.
                images.append(news.image)
                logger.debug(exc)
Пример #12
0
    def _news(self, title='Test', content='TestContent', enabled=True):
        """Create, save and return a NewsItem fixture with the given fields."""
        item = NewsItem(title=title, content=content, enabled=enabled)
        item.save()
        return item
Пример #13
0
def insert_new_new_to_database():
    """Persist a NewsItem whose title and content share one random string."""
    text = get_random_new()
    item = NewsItem()
    item.title = text
    item.content = text
    item.save()