def setUp(self):
    # create an admin user
    admin = User.objects.create_user('admin', password='******')
    admin.is_staff = True
    admin.save()
    self.admin = admin
    # create a user with no permissions
    reader = User.objects.create_user('reader', password='******')
    reader.save()
    self.reader = reader
    # create a user with the add-article permission
    # (has_perm() only checks a permission; user_permissions.add() grants it)
    writer = User.objects.create_user('writer', password='******')
    writer.user_permissions.add(Permission.objects.get(codename='add_article'))
    writer.save()
    self.writer = writer
    # create a few articles
    a1 = Article(title="titre 1", content="content 1", author=writer, views=1)
    a1.save()
    a2 = Article(title="titre 2", content="content 2", author=admin, likes=1)
    a2.save()
def post(self, request):
    """Create an article, optionally as an answer to an existing one."""
    error_message = None
    answered = Article.objects.filter(
        pk=request.POST.get("id_article_answered")).first()
    if answered is not None:
        article_with_data = Article(user=request.user,
                                    article_answered=answered)
    else:
        article_with_data = Article(user=request.user)
    article_form = ArticleForm(request.POST, request.FILES,
                               instance=article_with_data)
    if article_form.is_valid():
        new_article = article_form.save()
        return HttpResponseRedirect(
            reverse('article_details',
                    args=[new_article.user.username, new_article.pk]))
    context = {'form': article_form, 'message': error_message}
    return render(request, 'articles/create.html', context)
def test_slug_uniqueness(self):
    article = Article(title='foo', flavor=Article.EVENT)
    article.full_clean()
    article.save()
    slug1 = article.slug

    article = Article(title='foo', flavor=Article.EVENT)
    article.full_clean()
    article.save()
    slug2 = article.slug

    self.assertNotEqual(slug1, slug2)
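# The test above relies on Article generating a distinct slug for each save,
# even when titles collide. A minimal sketch of such a save() override,
# assuming a unique `slug` field populated from `title` (both field names are
# assumptions, not confirmed by the source):
from django.utils.text import slugify

def save(self, *args, **kwargs):
    if not self.slug:
        base = slugify(self.title)
        candidate = base
        suffix = 2
        # append a numeric suffix until the slug is unused
        while Article.objects.filter(slug=candidate).exclude(pk=self.pk).exists():
            candidate = f"{base}-{suffix}"
            suffix += 1
        self.slug = candidate
    super().save(*args, **kwargs)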
async def main():
    print("Started updating databases")
    try:
        top_stories = requests.get(
            "https://hacker-news.firebaseio.com/v0/topstories.json?print=pretty"
        ).json()
        # delete the contents of storage before adding new entries
        ArticleBackup.objects.all().delete()
        # use asyncio.gather to run three fetches at a time
        for i in range(0, 30, 3):
            await asyncio.gather(storeArticle(str(top_stories[i])),
                                 storeArticle(str(top_stories[i + 1])),
                                 storeArticle(str(top_stories[i + 2])))
        # delete the contents of the cache and repopulate it from storage
        Article.objects.all().delete()
        for item in ArticleBackup.objects.all():
            article = Article()
            article.url = item.url
            article.title = item.title
            article.score = item.score
            article.by = item.by
            article.sentimentPolarity = item.sentimentPolarity
            article.save()
        print("Database update completed")
    except Exception as e:
        print(f"Error in updating articles: {e}")
        return
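# storeArticle() is awaited above but not shown. A minimal sketch, assuming
# the Hacker News item endpoint and a TextBlob-based polarity score; the
# field names mirror the ArticleBackup assignments above, everything else
# (including the blocking requests call, which mirrors main()) is an
# assumption:
from textblob import TextBlob

async def storeArticle(item_id):
    item = requests.get(
        f"https://hacker-news.firebaseio.com/v0/item/{item_id}.json").json()
    backup = ArticleBackup()
    backup.url = item.get("url", "")
    backup.title = item.get("title", "")
    backup.score = item.get("score", 0)
    backup.by = item.get("by", "")
    # polarity in [-1, 1] computed from the title text
    backup.sentimentPolarity = TextBlob(backup.title).sentiment.polarity
    backup.save()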
def editar_article(request, id_article):
    EditForm = modelform_factory(Article,
                                 fields=('nom', 'consola', 'esconsola',
                                         'preu', 'PEGI', 'stock', 'companyia',
                                         'imatge', 'coleccionista', 'detalls',
                                         'video', 'esaccesori'))
    unEdit = Article()
    # check that the article exists
    if id_article:
        unEdit = get_object_or_404(Article, pk=id_article)
    if request.method == 'POST':
        form = EditForm(request.POST, request.FILES, instance=unEdit)
        if form.is_valid():
            form.save()
            messages.info(request, "article canviat correctament")
            return redirect("usuaris:menu_usuari")
    else:
        form = EditForm(instance=unEdit)
    for f in form.fields:
        form.fields[f].widget.attrs['class'] = 'formulari'
    form.fields['nom'].widget.attrs['placeholder'] = "Nom"
    form.fields['consola'].widget.attrs['placeholder'] = "Consola"
    form.fields['companyia'].widget.attrs['placeholder'] = "companyia"
    form.fields['detalls'].widget.attrs['placeholder'] = "detalls"
    form.fields['preu'].widget.attrs['placeholder'] = "Preu"
    return render(request, 'articles/editar_article.html', {'form': form})
def fetch_articles():
    articles = list()
    if not DEBUG:
        sql = """
            SELECT p.ID, p.post_title, u.display_name AS author, p.post_date_gmt
            FROM `wp_posts` p, `wp_users` u
            WHERE p.post_author = u.ID
              AND p.post_type = 'post'
              AND p.post_status = 'publish'
            ORDER BY p.post_date_gmt DESC
        """
    else:
        sql = """
            SELECT a.id, a.subject, b.first_name, a.update_date_time
            FROM articles_article a, auth_user b
            WHERE a.author_id = b.id
            ORDER BY a.create_date_time DESC
            LIMIT 10
        """
    connection = get_connection()
    cursor = connection.cursor()
    cursor.execute(sql)
    for row in cursor.fetchall():
        article = Article()
        article.id = row[0]
        article.subject = row[1]
        # row[2] holds the author name; it is not mapped onto the Article here
        article.create_date_time = row[3]
        articles.append(article)
    connection.close()
    return articles
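# get_connection() is assumed above but not shown. A minimal sketch using
# MySQLdb, since the production query reads WordPress tables; the host,
# credentials, and database name are placeholders, not from the source:
import MySQLdb

def get_connection():
    return MySQLdb.connect(host="localhost", user="wp_user",
                           passwd="******", db="wordpress")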
def run():
    wb = xlrd.open_workbook(path)
    sheet = wb.sheet_by_index(0)
    for j in range(1, sheet.nrows):
        row = []
        for i in range(sheet.ncols):
            # sheet columns 10 and 11 are skipped, so later values shift left
            if i not in [10, 11]:
                if i < 4:
                    row.append(int(sheet.cell_value(j, i)))
                else:
                    row.append(sheet.cell_value(j, i))
        print(len(row))
        article = Article(job_id=row[0],
                          ns_category_id=row[1],
                          location_id=row[2],
                          job_type_id=row[3],
                          filename=row[4],
                          job_title=row[5],
                          job_slug=row[6],
                          job_description=row[7],
                          job_image=row[8],
                          job_status=row[9],
                          location=row[10],
                          House_of=row[11],
                          Year=row[12],
                          epaperlink=row[13],
                          imagelink=row[14],
                          imagelinkautogenerate=row[15])
        article.save()
        print(article)
def handle(self, *args, **options):
    Tag.objects.bulk_create([Tag(tag=t[0], slug=t[1]) for t in tags],
                            ignore_conflicts=True)
    names = generate_username(int(options["num_users"]))
    User = get_user_model()
    users = [
        User.objects.create_user(username=n, password=self.password)
        for n in names
    ]
    print(users)
    gen = DocumentGenerator()
    gen.init_word_cache(5000)
    gen.init_sentence_cache(5000)
    for user in users:
        user = User.objects.get(username=user.username)
        user.profile.bio = gen.sentence()
        user.profile.save()
        articles = Article.objects.bulk_create([
            Article(
                slug=make_slug(gen.sentence()),
                title=gen.sentence(),
                description=gen.sentence(),
                body=gen.paragraph(),
                author=user.profile,
            )
            # Make sure every user has at least 1 article
            for _ in range(random.randrange(1, self.article_upper_bound))
        ])
        print(articles)
        # Make sure every article has 1 tag, could add more later
        for article in articles:
            article.tags.add(Tag.objects.get(slug=random.choice(tags)[1]))
    self.stdout.write(self.style.SUCCESS(f"Created {len(users)} users"))
def create_article():
    user = current_identity
    # deny users who are neither authors nor admins instead of silently
    # continuing (the original body was an empty pass)
    if user.is_not_author() and user.is_not_admin():
        return jsonify({'full_messages': ['Not authorized']}), 403
    title = request.json.get('title')
    description = request.json.get('description')
    body = request.json.get('body')
    tags_input = request.json.get('tags')
    categories_input = request.json.get('categories')
    tags = []
    categories = []
    for category in categories_input:
        categories.append(
            get_or_create(db.session, Category,
                          {'description': category.get('description', None)},
                          name=category['name'])[0])
    for tag in tags_input:
        tags.append(
            get_or_create(db.session, Tag,
                          {'description': tag.get('description')},
                          name=tag['name'])[0])
    article = Article(title=title,
                      description=description,
                      body=body,
                      user_id=user.id,
                      tags=tags,
                      categories=categories)
    db.session.add(article)
    db.session.commit()
    response = {'full_messages': ['Article created successfully']}
    response.update(ArticleDetailsSerializer(article).data)
    return jsonify(response)
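# get_or_create() above takes (session, model, defaults-dict, **filters) and
# returns an (instance, created) pair. A minimal sketch under that assumed
# signature; the source does not show the real implementation:
def get_or_create(session, model, defaults=None, **kwargs):
    instance = session.query(model).filter_by(**kwargs).first()
    if instance is not None:
        return instance, False
    params = dict(kwargs)
    params.update(defaults or {})
    instance = model(**params)
    session.add(instance)
    return instance, True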
def create(request):
    if request.method == 'POST':
        form = ArticleForm(request.POST)
        if form.is_valid():
            article = Article(title=form.cleaned_data['title'],
                              body=form.cleaned_data['body'],
                              pub_date=timezone.now(),
                              author=request.user)
            article.save()
            return render(
                request, 'article.html', {
                    'article': article,
                    'message': 'You added a new article!',
                    'request': request,
                    'likes': Likes.objects.filter(article=article).count(),
                    'userlike': Likes.objects.filter(article=article,
                                                     author=request.user),
                })
        else:
            return render(request, 'article_create.html', {
                'message': 'Please enter valid input!',
                'request': request,
            })
    return render(request, 'article_create.html', {
        'request': request,
    })
def test_failure_due_to_long_title(self):
    long_title = 'a' * 101
    article = Article(title=long_title, text=self.text, author=self.user)
    expected_message = ('Ensure this value has at most 100 characters '
                        '(it has 101).')
    with self.assertRaisesMessage(ValidationError, expected_message):
        article.full_clean()
def get_data(data_type='article', category='new'):
    if data_type == 'article' and category not in ['new', 'page']:
        logging.error(
            "Category must be 'new' or 'page' (for front page articles)")
        return []
    hn = http.client.HTTPConnection('api.ihackernews.com')
    # pick the endpoint for the API query
    path = category if data_type == 'article' else 'newcomments'
    hn.request('GET', '/' + path)
    r1 = hn.getresponse()
    if r1.status == 200:
        try:
            text = r1.read()
            data_dict = json.loads(text)
        except Exception as e:
            logging.error('Failed to parse JSON response: %s', e)
            return []
        # build the list of either comments or articles based on data_type
        if data_type == 'article':
            if category == 'new':
                # only need the first 5 articles - will improve the sort time
                # later (this list is already sorted)
                data_dict['items'] = data_dict['items'][:5]
            data_list = [Article(**i) for i in data_dict['items']]
        else:
            data_list = [ArtComment(**i) for i in data_dict['comments'][:5]]
        if data_type != 'article' or category == 'new':
            # list is already sorted
            return (True, data_list[:5])
        else:
            # need to sort to find the newest front page articles
            return (True, sorted(data_list)[:5])
    # non-200 responses yield an empty result rather than None
    return []
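# sorted(data_list) above only works if Article defines an ordering. A
# minimal sketch assuming each API item carries a numeric `id` that grows
# with recency (an assumption; the real payload fields are not shown):
import functools

@functools.total_ordering
class Article(object):
    def __init__(self, **fields):
        self.__dict__.update(fields)

    def __eq__(self, other):
        return self.id == other.id

    def __lt__(self, other):
        # larger ids are newer, so they sort first
        return self.id > other.id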
def insert(**overrides):
    article_data = make_article(**overrides)
    article = Article(**article_data)
    db_session.add(article)
    db_session.commit()
    return article
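# make_article() above is assumed to return a dict of default Article fields
# with any overrides applied. A minimal sketch; the default values are
# placeholders, not from the source:
def make_article(**overrides):
    defaults = {
        'title': 'A title',
        'body': 'Some body text',
    }
    defaults.update(overrides)
    return defaults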
async def main():
    try:
        top_stories = requests.get(
            "https://hacker-news.firebaseio.com/v0/topstories.json?print=pretty"
        ).json()
        ArticleBackup.objects.all().delete()
        # fetch three stories at a time
        for i in range(0, 30, 3):
            await asyncio.gather(storeArticle(str(top_stories[i])),
                                 storeArticle(str(top_stories[i + 1])),
                                 storeArticle(str(top_stories[i + 2])))
        Article.objects.all().delete()
        for item in ArticleBackup.objects.all():
            article = Article()
            article.url = item.url
            article.title = item.title
            article.score = item.score
            article.by = item.by
            article.sentimentPolarity = item.sentimentPolarity
            article.save()
    except Exception as e:
        print(f"Error in updating articles: {e}")
        return
def post(self, request, *args, **kwargs):
    article = Article(item=request.data["item"],
                      quantity=request.data["quantity"],
                      price=request.data["price"],
                      tag_id=request.data["tag"],
                      invoice_id=request.data["invoice"])
    article.save()
    return Response(status=status.HTTP_200_OK)
def test_was_written_recently_with_article_written_under_seven_days_ago(
        self):
    """
    was_written_recently() should return True if the pub_date is
    within the last seven days
    """
    article = Article(pub_date=timezone.now() - datetime.timedelta(days=3))
    self.assertEqual(article.was_written_recently(), True)
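# was_written_recently() is not shown in the source. A minimal sketch of the
# model method the test exercises, assuming the seven-day window the test
# name suggests:
import datetime
from django.utils import timezone

def was_written_recently(self):
    now = timezone.now()
    return now - datetime.timedelta(days=7) <= self.pub_date <= now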
def create_articles(self, emails):
    """Attempts to post new articles based on parsed email messages"""
    created = []
    site = Site.objects.get_current()
    ack = self.config.get('acknowledge', False)
    autopost = self.config.get('autopost', False)
    # make sure we have a valid default markup
    markup = self.config.get('markup', MARKUP_HTML)
    if markup not in (MARKUP_HTML, MARKUP_MARKDOWN, MARKUP_REST,
                      MARKUP_TEXTILE):
        markup = MARKUP_HTML
    for num, email in emails.items():
        name, sender = parseaddr(email['From'])
        try:
            author = User.objects.get(email=sender, is_active=True)
        except User.DoesNotExist:
            # unauthorized sender
            self.log('Not processing message from unauthorized sender.', 0)
            continue
        # get the attributes for the article
        title = email.get('Subject', '--- article from email ---')
        content = self.get_email_content(email)
        try:
            # try to grab the timestamp from the email message
            publish_date = datetime.fromtimestamp(
                time.mktime(parsedate(email['Date'])))
        except Exception as err:
            self.log("An error occurred when I tried to convert the email's "
                     "timestamp into a datetime object: %s" % (err,))
            publish_date = datetime.now()
        # post the article
        article = Article(
            author=author,
            title=title,
            content=content,
            markup=markup,
            publish_date=publish_date,
            is_active=autopost,
        )
        try:
            article.save()
            self.log('Article created.')
            created.append(article)
        except Exception as err:
            # log it and move on to the next message
            self.log('Error creating article: %s' % (err,), 0)
            continue
def start_scraper(self):
    request = requests.get(self.URL)
    soup = BeautifulSoup(request.content, 'html.parser')
    articles = soup.find_all("url")
    for article in articles:  # from the XML sitemap
        item = {}
        item['url'] = article.find('loc').text
        item['publisher'] = Publisher.objects.get(name="Daily Express")
        # ignore live and gallery articles
        if "/live/" in item['url'] or '/gallery/' in item['url']:
            continue
        more = requests.get(item['url'])
        more_soup = BeautifulSoup(more.content, 'html.parser')
        print(item['url'])
        item['headline'] = more_soup.find(
            'meta', {"property": "og:title"})['content']
        # author
        item['author'] = more_soup.find("meta",
                                        {"name": "author"})['content']
        # parse the publication datetime
        publication_date_str = more_soup.find(
            "meta", {"property": "article:published_time"})['content']
        item['published_at'] = dateutil.parser.parse(publication_date_str)
        # get section and subsection from the navigation menus
        lis = more_soup.find("ul", {"class": "main-nav"}).find_all('li')
        item['section'] = self.get_current_section(lis)
        lis_subsection = more_soup.find("ul", {
            "class": "page sub-nav"
        }).find_all('li')
        item['subsection'] = self.get_current_section(lis_subsection,
                                                      sub=True)
        item['content'] = ''
        item['content_length'] = more_soup.find(
            'meta', {"property": "article:word_count"})['content']
        item['keywords'] = more_soup.find(
            'meta', {"name": "news_keywords"})['content']
        model = Article(**item)
        model.save()
        """body_content = more_soup.find("div", {"itemprop": "articleBody"})
        if not body_content:
            body_content = more_soup.find("div", {"itemprop": "reviewBody"})
        if not body_content:
            body_content = more_soup.find("div",
                                          {"class": "content__standfirst"})
        ps = body_content.find_all('p')
        body = [x.text for x in ps]
        content = ' '.join(body)
        counter = len(re.findall(r'\w+', content))"""
        print(item)
def article():
    article = Article()
    article.title = 'title1'
    article.description = 'description1'
    article.updated_at = 'updated_at1'
    article.category = category()
    article.published = 'published1'
    return article
def _create_article(name, content):
    """ Create an article in the DB with the given name and content """
    article = Article(name=name)
    article.save()
    revision = ArticleRevision(article=article, content=content)
    revision.save()
    return article
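# A hypothetical test using _create_article(); the reverse accessor
# `articlerevision_set` is the Django default for an un-named FK and is an
# assumption here:
def test_article_has_initial_revision(self):
    article = _create_article('welcome', 'First revision text')
    self.assertEqual(article.articlerevision_set.count(), 1)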
def create_articles():
    for article_info in content:
        article = Article()
        for key in article_info:
            try:
                setattr(article, key, article_info[key])
            except AttributeError:
                pass
        article.save()
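# `content` above is assumed to be a module-level iterable of field dicts,
# e.g. (keys and values are illustrative, not from the source):
content = [
    {'title': 'First post', 'body': 'Hello world'},
    {'title': 'Second post', 'body': 'More text'},
]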
def deal_add_article(request):
    title = request.POST["title"]
    content = request.POST["content"]
    time_stamp = datetime.datetime.now().strftime('%Y-%m-%d')
    article = Article(title=title,
                      content=content,
                      time_stamp=time_stamp)
    article.save()
    return HttpResponseRedirect("/manager/article_list/")
def create(self, validated_data):
    article = Article(
        judul=validated_data['judul'],
        slug=slugify(validated_data['judul']),
        isi=validated_data['isi'],
        kategori=validated_data['kategori'],
        penulis=validated_data['penulis'],
    )
    article.save()
    return article
def handle(self, *args, **options):
    with open('articles.json', encoding='utf-8') as jsonfile:
        articles = json.load(jsonfile)
    for line in articles:
        article = Article(image=line['fields']['image'],
                          title=line['fields']['title'],
                          text=line['fields']['text'],
                          published_at=line['fields']['published_at'])
        article.save()
def readArticle(request, id_art):
    t = get_template('article_to_read.html')
    articles = Article.objects.filter(author_id=request.user.id)
    displayed = Article()
    if articles.exists():
        displayed = Article.objects.get(id=id_art)
    else:
        # sentinel id so the template can tell no article was found
        displayed.id = -5
    html = t.render({'articles': articles, 'displayed': displayed}, request)
    return HttpResponse(html)
def handle(self, *args, **options):
    with open('phones.csv', 'r') as csvfile:
        phone_reader = csv.reader(csvfile, delimiter=';')
        # skip the header row
        next(phone_reader)
        for line in phone_reader:
            # positional args map to the model's fields in declaration order
            p = Article(*line)
            p.save()
def post(self, request):
    article = Article()
    article.author = request.user
    form = ArticleForm(request.POST, request.FILES, instance=article)
    if form.is_valid():
        article = form.save()
        # redirect to the detail URL by name, not by template filename
        return redirect('user_articles_detail', article.pk)
    # re-render the form with validation errors (template name is assumed)
    return render(request, 'articles/create.html', {'form': form})
def readContent(self):
    with open(staticfiles_storage.path('content_api.json'),
              encoding="utf-8") as f:
        data = json.load(f)
    for article in data['results']:
        if not Article.objects.filter(uuid=article['uuid']).exists():
            newArticle = Article()
            newArticle.authorByLine = article['byline']
            newArticle.body = article['body']
            newArticle.publishDate = article['publish_at'].split('T')[0]
            newArticle.headline = article['headline']
            newArticle.promo = article['promo']
            newArticle.disclosure = article['disclosure']
            newArticle.uuid = article['uuid']
            newArticle.slug = slugify(article['headline'])
            for img in article['images']:
                if img['featured']:
                    newArticle.featuredImage = img['url']
            # save once so the article has a pk before the M2M adds below
            newArticle.save()
            for ins in article['instruments']:
                if Instrument.objects.filter(
                        instrumentId=ins['instrument_id']).exists():
                    newArticle.relatedInstruments.add(
                        Instrument.objects.get(
                            instrumentId=ins['instrument_id']))
                else:
                    i = Instrument()
                    i.instrumentId = ins['instrument_id']
                    i.companyName = ins['company_name']
                    i.symbol = ins['symbol']
                    i.exchange = ins['exchange']
                    i.save()
                    newArticle.relatedInstruments.add(i)
            for tag in article['tags']:
                if Tag.objects.filter(uuid=tag['uuid']).exists():
                    newArticle.tags.add(Tag.objects.get(uuid=tag['uuid']))
                else:
                    t = Tag()
                    t.uuid = tag['uuid']
                    t.tagName = tag['name']
                    t.tagSlug = tag['slug']
                    t.tagTypeName = tag['tag_type']['name']
                    t.tagTypeSlug = tag['tag_type']['slug']
                    t.save()
                    newArticle.tags.add(t)
            for tag in article['tags']:
                if self.first == '' and tag['slug'] == '10-promise':
                    self.first = Article.objects.get(uuid=article['uuid'])
def handle(self, *args, **options):
    with open(settings.ARTICLES, encoding='utf-8') as file:
        reader = json.load(file)
    for article in reader:
        new_article = Article(
            title=article['fields']['title'],
            text=article['fields']['text'],
            published_at=article['fields']['published_at'],
            image=article['fields']['image'])
        new_article.save()
def add_article_view(request):
    article = Article()
    if request.method == "POST":
        serializer = ArticleSerializer(article, data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status.HTTP_201_CREATED)
        return Response(serializer.errors, status.HTTP_400_BAD_REQUEST)