def get_queryset(self):
    site = self.request.site
    query = Q()
    query &= News.get_deleted_query(self.request.user)
    query &= News.get_published_query(self.request.user)
    if site:
        query &= Q(site=site)
    elif settings.IS_PORTAL_SITE:
        # Portal site: only show news that is not tied to any specific site.
        query &= Q(site__isnull=True)
    else:
        # No site and not the portal: match nothing.
        query &= Q(id__isnull=True)
    return super(NewsView, self).get_queryset().filter(query)
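The queryset above composes two `Q` helpers defined on the model. A minimal sketch of what those classmethods could look like, assuming soft-delete and publication flags named `is_deleted`, `is_published` and an `author` field (none of these names come from the original model):

from django.db import models
from django.db.models import Q


class News(models.Model):
    ...

    @classmethod
    def get_deleted_query(cls, user):
        # Staff may see soft-deleted items; everyone else only sees live rows.
        if user.is_staff:
            return Q()
        return Q(is_deleted=False)

    @classmethod
    def get_published_query(cls, user):
        # Authors see their own drafts; other users only see published items.
        if user.is_authenticated:
            return Q(is_published=True) | Q(author=user)
        return Q(is_published=True)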
def show(request):
    if request.user.is_authenticated:
        refreshLastOnline(request.user)

    context = {}

    # Split the professions into groups of seven entries for the template.
    all_profession = getProfessionList()
    all_groups = []
    profarray = []
    for key, profession in enumerate(all_profession, start=1):
        profarray.append(profession)
        if key % 7 == 0:
            all_groups.append({'professions': profarray.copy()})
            profarray.clear()
    all_groups.append({'professions': profarray.copy()})

    context['all_profession'] = all_groups
    context['citylist'] = getCityListFull()
    context['comments'] = Comments.GetActual(Comments, position_begin=0, position_end=2)
    context['news'] = News.GetActual(News, position_begin=0, position_end=3)
    return render(request, 'index.html', context)
def generate_news_stories(user, news_id=0, player=None):
    """Retrieve specific or random news story"""
    user_manager = user.managers.get(manager_info__user_manager=True)
    user_club = user.clubs.get(club_info__user_club=True)

    # Call generators.generator.news_items function to return a dict list of stories
    news_stories = news_items({
        "user": user,
        "user_manager": user_manager,
        "user_club": user_club,
        "news_id": news_id,
        "player": player,
    })

    # Save generated story or stories to database
    for story in news_stories:
        news = News(user=user, **story)
        news.save()
def process_item(self, item, spider):
    if item.get('type') == "news":
        news = News(
            headline=item.get('headline'),
            body=item.get('body'),
            url=item.get('url'),
            byline=item.get('byLine'),
            section=item.get('section'),
            picture=item.get('picture'),
        )
        news.save()
    elif item.get('type') == "tweet":
        tweet = Tweet(
            tweet=item.get('tweet'),
            time=item.get('time'),
            user=item.get('user'),
            user_name=item.get('user_name'),
            link=item.get('link'),
            user_picture=item.get('user_picture'),
        )
        tweet.save()
    return item
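This Scrapy pipeline only runs if it is enabled in the project settings. A minimal sketch, assuming the project is called `crawler` and the pipeline class is named `SaveToDjangoPipeline` (both names are illustrative, not from the original):

# settings.py of the assumed `crawler` project; the priority value 300 is arbitrary.
ITEM_PIPELINES = {
    "crawler.pipelines.SaveToDjangoPipeline": 300,
}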
def each_article(self, category, url_list):
    main = Category(name=category)
    main.save()
    # Collect the full article text of every linked page.
    for link in url_list:
        soup = self.soup(link)
        div = soup.find("div", {"class": "bdaia-post-content"})
        p_list = div.find_all("p")
        description = [p.get_text() for p in p_list]
        self.article.append(u''.join(description).encode('utf-8'))
    # Persist one News row per scraped entry.
    full = zip(self.title_list, self.href_list, self.description, self.article)
    for title, href, short_descr, article in full:
        item = News(
            item_title=title,
            item_link=href,
            item_short_descr=short_descr,
            article=article,
            category=main,
        )
        item.save()
def _write_to_db(self, item):
    try:
        post = News.objects.get(title=item["title"])
    except News.DoesNotExist:
        post = News(title=item["title"])
    post.body = item["description"]
    post.order = item["order"]
    post.link = item["link"]
    post.date = item["date"]
    post.save()
    return post.pk
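The try/except above is effectively a get-or-create followed by an update. A sketch of the same behaviour expressed with Django's `update_or_create`, using the field names from the function above:

def _write_to_db(self, item):
    # Look the row up by title; `defaults` is applied whether the row was
    # created or already existed.
    post, _created = News.objects.update_or_create(
        title=item["title"],
        defaults={
            "body": item["description"],
            "order": item["order"],
            "link": item["link"],
            "date": item["date"],
        },
    )
    return post.pk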
def handle(self, *args, **options):
    # ...
    print('Start')
    try:
        cat = Category.objects.get(name='Goods')
        n = News()
        n.category = cat
        n.item_title = 'News of %s' % cat.name
        n.save()
        print('Saving....')
    except Category.DoesNotExist:
        # Nothing to seed if the 'Goods' category is missing.
        pass
def handle(self, *arguments, **options):
    print('START')
    i = 1
    # `soup` is expected to hold the already-parsed page (e.g. a BeautifulSoup object).
    block = soup.find_all('div', {'class': 'item-container'})
    for l in block:
        record = News()
        record.title = 'Title %s' % i
        # Concatenate every paragraph of the issue item into the record body.
        find_text = l.findNext('div', {'class': 'issue-item'}).find_all('p')
        a = ''
        for m in find_text:
            a += m.text
        record.content = a
        record.urls = l.find('a', {'class': 'issue-item-title'}).get('href', 'http')
        record.save()
        print('Saving %s' % i)
        i += 1
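The command above relies on a `soup` object that is not built inside the function. A minimal sketch of producing it with requests and BeautifulSoup; the helper name, URL parameter and timeout are assumptions, not from the original:

import requests
from bs4 import BeautifulSoup


def fetch_soup(url):
    # Download the page and parse it so the command can walk the
    # 'item-container' blocks.
    response = requests.get(url, timeout=10)
    response.raise_for_status()
    return BeautifulSoup(response.text, "html.parser")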
def news_add(request, **kwargs):
    news = News(**kwargs)
    news.save()
    return {'news': news.tojson()}
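`news_add` assumes the model exposes a `tojson` helper. A minimal sketch of such a method; the serialized field set (id, title, created) is invented for illustration:

class News(models.Model):
    ...

    def tojson(self):
        # Serialize the fields the caller's JSON response needs.
        return {
            "id": self.pk,
            "title": self.title,
            "created": self.created.isoformat() if self.created else None,
        }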
import requests
from bs4 import BeautifulSoup


def add_arguments(self, parser):
    parser.add_argument(
        '--url',
        dest='url',
        help='Url address',
    )

def handle(self, *args, **options):
    # ...
    print('Start')
    # Seed placeholder records.
    for i in range(1, 200):
        record = News()
        record.title = 'Title %s' % i
        record.content = 'Content %s' % i
        record.save()
        print('Saving %s' % i)
    if options['url']:
        print('Load from %s' % options['url'])
        # ...
    print('End')