def mutate(root, info, title, username, summary, content):
    """Create a post and attach it to the user matching *username*.

    The post is only created when exactly one user matches; otherwise
    ``CreatePost`` is returned with ``post=None`` and ``ok=False``.
    """
    matched = list(UserModel.objects(username=username))
    if len(matched) != 1:
        # Zero or ambiguous matches: do nothing, report failure.
        return CreatePost(post=None, ok=False)
    author = matched[0]
    new_post = PostModel(
        title=title,
        summary=summary,
        content=content,
        datetime_created=datetime.datetime.now(),
    )
    new_post.save()
    author.posts.append(new_post)
    author.save()
    return CreatePost(post=new_post, ok=True)
def update_user_post(self, user_id):
    """Synchronise one user's stored posts with their recent VK posts.

    Fetches the user's VK posts from the last day, refreshes engagement
    counters for posts already in the DB, inserts rows for new posts,
    and deletes stored posts that the fetch did not return.

    :param user_id: primary key of the ``User`` entity to synchronise.
    """
    user = User[user_id]
    # IDs of ALL posts currently stored for this user.
    post_set = set((i.id_ for i in user.posts))
    # Only the last 24 hours are fetched from VK.
    vk_posts = self.vk_client.get_posts(user.id_, interval=timedelta(days=1))
    for vk_post in vk_posts:
        try:
            # Existing row (composite key: owner id + post id):
            # refresh only the mutable counters.
            post = Post[vk_post.owner_id, vk_post.pid]
            post.comments = vk_post.comments
            post.likes = vk_post.likes
            post.reposts = vk_post.reposts
            post.share_count = vk_post.share_count
        except pony.orm.core.ObjectNotFound:
            # Not stored yet: create a full row for it.
            post = Post(owner=user, id_=vk_post.pid, date=vk_post.date,
                        text=vk_post.text, comments=vk_post.comments,
                        likes=vk_post.likes, reposts=vk_post.reposts,
                        attachments=vk_post.attachments,
                        original_post_id=vk_post.original_post_id,
                        share_count=vk_post.share_count)
        # Mark this post as "still present upstream".
        if post.id_ in post_set:
            post_set.remove(post.id_)
    # Whatever remains was stored locally but not returned by VK, so it is
    # treated as deleted upstream and removed here.
    # NOTE(review): post_set starts from ALL stored posts while VK is only
    # queried for the last day — posts older than a day would be deleted
    # unless user.posts itself only holds recent posts; confirm intended.
    for i in post_set:
        Post[user_id, i].delete()
def get_last_posts(self):
    """
    Specific implementation of the generic get_last_posts to gather
    the last post in the web
    :return: [Post]
    """
    results = []
    response = requests.get(self.target_url)
    soup = BeautifulSoup(response.content, 'html.parser')
    for item in soup.findAll("li", {"class": "property-item"}):
        # Items without a list position are not real listings; skip them.
        if not item.get('list-position'):
            continue
        anchor = item.find("a", {"class": "property-title"})
        title = cleaning_spaces(self.text(anchor))
        href = anchor['href']
        info = item.find("div", {"class": "property-info-wrapper"})
        summary = cleaning_spaces(self.text(info))
        post_id = str(item['list-item'])
        price_node = item.find("div", {"class": "property-price"})
        price_full = cleaning_spaces(self.text(price_node))
        price = price_full[:-2]  # Removing currency symbol
        # The image URL is a template; 'se' selects a concrete width.
        img_wrapper = item.find('div', {'class': 'property-img-wrapper'})
        if img_wrapper:
            image_src = img_wrapper.get('images-path', '').replace('{width}', 'se')
        else:
            image_src = None
        complete_href = href
        summary = '\n'.join([title, price_full, summary, complete_href])
        results.append(Post(id=post_id,
                            href=complete_href,
                            description=summary,
                            image=image_src,
                            price=price))
    return results
def add_post(self, point_id, name, type_p, population=0, armor=0,
             product=0, replenishment=1, map_id=None, session=None):
    """ Creates new Post in DB.

    Falls back to the instance's current map when *map_id* is not given.
    Returns the id assigned to the new row.
    """
    if map_id is None:
        target_map = self.current_map_id
    else:
        target_map = map_id
    post_row = Post(name=name, type=type_p, population=population,
                    armor=armor, product=product,
                    replenishment=replenishment, map_id=target_map,
                    point_id=point_id)
    session.add(post_row)
    session.commit()  # Commit to get post's id.
    return post_row.id
def store_posts(posts):
    """Store posts in database.

    Ensures the schema exists, then inserts one ``Post`` row per entry in
    *posts* (dicts with ``name``, ``url`` and ``desc`` keys) and commits
    once at the end.

    :param posts: iterable of post dicts to persist.
    """
    db_session = sessionLocal()
    models.base.metadata.create_all(bind=engine)
    try:
        for post in posts:
            db_post = Post(name=post['name'], url=post['url'], desc=post['desc'])
            db_session.add(db_post)
            logging.info(f'Added post: {post}')
        db_session.commit()
    finally:
        # Close even when add/commit raises so the connection is returned
        # to the pool (the original leaked the session on error).
        db_session.close()
def create():
    """Create a Post from the request's JSON payload and return it serialized.

    Expects ``title``, ``body`` and ``category_id`` keys in the JSON body.
    """
    payload = request.json
    title = payload['title']
    body = payload['body']
    # Resolve the category row the new post belongs to.
    category = Category.query.get(payload['category_id'])
    post = Post(title, body, category)
    db.session.add(post)
    db.session.commit()
    serialized = post_schema.dump(post).data
    return jsonify(serialized)
def get_last_posts(self):
    """
    Specific implementation of the generic get_last_posts to gather
    the last post in the web
    :return: [Post]
    """
    last_posts = []
    r = requests.get(self.target_url)
    html = BeautifulSoup(r.content, 'html.parser')
    raw_posts = html.findAll("div", {"class": "list_ads_row"})
    for post in raw_posts:
        title_element = post.find("a", {"class": "subjectTitle"})
        title = cleaning_spaces(self.text(title_element))
        href = title_element['href']
        # Zone and date are joined into a single short description line.
        description = ' -- '.join(
            map(self.text, [
                post.find("p", {"class": "zone"}),
                post.find("p", {"class": "date"})
            ]))
        id_post = str(post['id'])
        price_full = cleaning_spaces(
            self.text(post.find("a", {"class": "subjectPrice"})))
        price_str = price_full[:-1]  # Removing currency symbol
        try:
            # -1 marks "no price"; thousands separators use '.'.
            price = int(price_str.replace('.', '')) if price_str else -1
        except ValueError:
            # Non-numeric price text (e.g. "a consultar") must not abort
            # the whole scrape; fall back to the unknown-price marker.
            price = -1
        image_element = post.find('img', {'class': 'lazy'})
        image_src = None
        if image_element:
            image_src = image_element.get('title', '')
        complete_href = href
        description = '\n'.join([
            title, price_full, cleaning_spaces(description), complete_href
        ])
        last_posts.append(
            Post(id=id_post,
                 href=complete_href,
                 description=description,
                 image=image_src,
                 price=price))
    return last_posts
def get_last_posts(self):
    """
    Specific implementation of the generic get_last_posts to gather
    the last post in the web
    :return: [Post]
    """
    last_posts = []
    r = requests.get(self.target_url)
    html = BeautifulSoup(r.content, 'html.parser')
    raw_posts = html.findAll("div", {"class": "card-product"})
    for post in raw_posts:
        title_element = post.find("a", {"class": "product-info-title"})
        if not title_element:
            continue
        title = cleaning_spaces(self.text(title_element))
        href = title_element['href']
        description = self.text(
            post.find("a", {"class": "product-info-category"}))
        # The last dash-separated segment of the href is the post id.
        id_post = str(href.split('-')[-1])
        price_full = cleaning_spaces(
            self.text(post.find("span", {"class": "product-info-price"})))
        # Raw string for the regex; a \d+ match group is never empty, so
        # the original's extra truthiness check on it was dead code.
        price_match = re.search(r'\d+', price_full)
        price = int(price_match.group()) if price_match else None
        image_element = post.find('img', {'class': 'card-product-image'})
        image_src = None
        if image_element:
            image_src = image_element.get('src', '')
        complete_href = self.crawler_url + href
        description = '\n'.join([
            title, price_full, cleaning_spaces(description), complete_href
        ])
        last_posts.append(
            Post(id=id_post,
                 href=complete_href,
                 description=description,
                 image=image_src,
                 price=price))
    return last_posts
def add_post_form():
    """Handle the new-post form: persist a Post for the logged-in user.

    On success redirects to the login index; on failure returns the error
    text. Non-POST requests fall through to the error string.
    """
    print(current_user.get_id())
    if request.method == 'POST':
        titulo = request.form.get('titulo')
        postagem = request.form.get('postagem')
        # NOTE: the original also read email/senha/age/hobby from the form
        # but never used them; those dead reads were removed.
        try:
            post = Post(titulo=titulo,
                        content=postagem,
                        count_like=0,
                        count_dislike=0,
                        updated_at=datetime.datetime.now(),
                        fg_user=int(current_user.get_id()))
            db.session.add(post)
            db.session.commit()
            return redirect(url_for('main.login_index'))
        except Exception as e:
            # Broad catch kept to preserve the original contract of
            # returning the error text instead of a 500 page.
            return (str(e))
    return 'deu erro'
def add_post(map_id, point_id, name, type_p, population=0, armor=0,
             product=0, replenishment=1, session=None):
    """ Creates a new Post in DB.

    Inserts the row with the given attributes and returns its id.
    """
    post_row = Post(name=name,
                    type=type_p,
                    population=population,
                    armor=armor,
                    product=product,
                    replenishment=replenishment,
                    map_id=map_id,
                    point_id=point_id)
    session.add(post_row)
    session.commit()  # Commit to get post's id.
    return post_row.id
def get_last_posts(self):
    """
    Specific implementation of the generic get_last_posts to gather
    the last post in the web
    :return: [Post]
    """
    results = []
    response = requests.get(self.target_url)
    soup = BeautifulSoup(response.content, 'html.parser')
    items = soup.findAll("div", {"class": "aditem-detail-image-container"})
    for item in items:
        anchor = item.find("a", {"class": "aditem-detail-title"})
        title = self.text(anchor)
        body = self.text(item.find("div", {"class": "tx"}))
        href = anchor.get('href')
        # The post id is extracted from the link with the shared regex.
        post_id = str(id_regex.search(href).group(1))
        rent_kind = self.text(item.find("div", {"class": "pillDiv"}))
        price_text = self.text(item.find("div", {"class": "aditem-price"}))
        # Strip the trailing currency symbol when a price is present.
        numeric_price = price_text[:-2] if price_text else None
        img = item.find("img", {"class": "ee"})
        image = img.get('src') if img else ''
        tags = item.findAll("div", {"class": "tag-mobile"})
        extra = ' '.join(self.text(tag) for tag in tags)
        full_url = self.crawler_url + href
        body = '\n'.join([title, body, rent_kind, extra, price_text, full_url])
        results.append(Post(id=post_id,
                            href=full_url,
                            description=body,
                            image=image,
                            price=numeric_price))
    return results
async def create_post(user: User, content: dict) -> Post:
    """Create a post (and its optional links) for *user*.

    *content* must contain ``title`` and ``body``; ``links`` is an optional
    list of ``{'title', 'url'}`` dicts. This simple example does not handle
    files. Returns the new ``Post`` on success, or ``None`` when the commit
    fails (the failure is logged).
    """
    # Date does not have to be set: the database defaults it to now().
    new_post = Post(uid=user.id, title=content['title'], body=content['body'])
    session.add(new_post)
    session.flush()  # flush so new_post.id exists for the link rows

    # Handle links
    for link in content.get('links', []):
        session.add(Link(pid=new_post.id, title=link['title'], url=link['url']))

    try:
        session.commit()
        session.flush()
        return new_post
    except exc.SQLAlchemyError as err:
        log.info('Error committing data to database')
        log.error(str(err))
        # Roll back so the session is usable afterwards — the original
        # left it in a failed state after a commit error.
        session.rollback()
        return None
def get_post_archives():
    """Return the archive month list, delegating to the Post model."""
    return Post.month_list()
def resolve_post(root, info, title):
    """GraphQL resolver: all posts whose title equals *title*."""
    matching = PostModel.objects(title=title)
    return list(matching)