def post():
    if request.method == 'POST':
        new_post = Post(current_user, request.form['text'])
        print(new_post.body)
        session.add(new_post)
        session.commit()
        return redirect("/index")
    return render_template('post.html')

def sign_up(user):
    statement = """INSERT INTO UserAccount (UserID, FullName, Username, Password, Email, Phone, UserPrivilege, BankName, BankAccount, IdentityCardNumber, ProfileImage)
                   VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"""
    token = validate_user(user)
    value = (user.user_id, user.fullname, user.username, user.password,
             user.email, user.phone, user.privilege, user.bank_name,
             user.bank_account, user.identity_card, user.image)
    if token[0] is True:
        token = Post.create(statement=statement, value=value)
    return (token, "User Registered Successfully!") if token is True else (
        False, "Unable to Register User!")

def post(self):
    args = self.parser.parse_args()
    session = create_session()
    post = Post(
        title=args['title'],
        subtopic_id=args['subtopic_id'],
        author_id=args['author_id'],
        lvl_access=args['lvl_access'],
    )
    session.add(post)
    session.commit()
    return jsonify({'success': 'OK'})

def populate_feed(current_user, current_board):
    chat_fields = {
        "sign_out": logout_url,
        "username": current_user.username,
        "user_name": current_user.name,
        "post_count": len(Post.query().filter(Post.author == current_user.key).fetch()),
        "user_count": len(User.query().fetch()),
        "posts": format_posts(Post.query().filter(
            Post.board == current_board).order(-Post.time).fetch(limit=30)),
        "users": User.query().order(User.username).fetch(),
        "board": current_board,
    }
    return chat_fields

def get_hot_posts(sub_reddit_name, number=constant.HOT_TRENDING_POSTS_COUNT):
    print("Getting", number, "hot subreddit posts for", sub_reddit_name)
    posts = []
    try:
        for submission in reddit.subreddit(sub_reddit_name).hot(limit=number):
            try:
                print("submission: ", submission.id)
                # Skip NSFW submissions
                if getattr(submission, 'over_18', False):
                    continue
                if getattr(submission, 'author', None):
                    author = getattr(submission.author, 'name', '')
                else:
                    author = ''
                print("author: ", author)
                post = Post(title=getattr(submission, 'title', ''),
                            text=getattr(submission, 'selftext', ''),
                            up_votes=getattr(submission, 'ups', 0),
                            down_votes=getattr(submission, 'downs', 0),
                            url=getattr(submission, 'url', ''),
                            id=getattr(submission, 'id', ''),
                            author=author,
                            sub_reddit_name=sub_reddit_name)
                print("Pre cleanup", post)
                # Skip bot posts and posts whose body is too long
                if post.author == 'AutoModerator' or len(
                        post.text) > constant.MAX_ALLOWED_CHARS_IN_POST:
                    continue
                # for c in submission.comments:
                #     try:
                #         if getattr(c, 'author', None):
                #             author = getattr(c.author, 'name', '')
                #         else:
                #             author = ''
                #         print("Comment author: ", author)
                #         comment = Comment(text=getattr(c, 'body', ''),
                #                           up_votes=getattr(c, 'ups', 0),
                #                           down_votes=getattr(c, 'downs', 0),
                #                           author=author)
                #         post.comments.append(comment)
                #         if len(post.comments) == constant.MAX_ALLOWED_COMMENTS_ON_POST:
                #             break
                #     except Exception as e:
                #         print_exception_details(e)
                #         return []
                posts.append(post)
                print(submission)
                print("Post cleanup", post)
            except Exception as e:
                print_exception_details(e)
                return []
    except Exception as e:
        print_exception_details(e)
        return []
    return posts

def get_current_post(session_id):
    current_post = Post()
    if not session_id:
        return current_post
    posts = session_posts.get(session_id, [])
    index = session_index.get(session_id, -1)
    if not posts or index == -1:
        return current_post
    if index < len(posts):
        current_post = posts[index]
        session_index[session_id] = index
    else:
        try:
            session_posts.pop(session_id)
            session_index.pop(session_id)
        except Exception as e:
            reddit_api.print_exception_details(e)
    return current_post

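# A minimal usage sketch for get_current_post above, assuming the module-level maps
# session_posts (session_id -> list of Post) and session_index (session_id -> index of
# the current post) that it reads. The helper name start_session is hypothetical and
# only illustrates how those two maps could be populated via the reddit_api module.
def start_session(session_id, sub_reddit_name):
    posts = reddit_api.get_hot_posts(sub_reddit_name)
    if posts:
        session_posts[session_id] = posts
        session_index[session_id] = 0
    return get_current_post(session_id)
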
class TestPost(unittest.TestCase):
    def setUp(self):
        self.mock_http = MagicMock(spec=HttpClient)
        self.mock_post = Post(self.mock_http)

    def test_get_posts(self):
        mock_response = Mock(spec=Response)
        mock_response.json.return_value = {
            'postId': 1,
            'title': 'My title',
            'description': 'Post description'
        }
        self.mock_http.request.return_value = mock_response
        response = self.mock_post.get_posts(1)
        self.mock_http.request.assert_called_once_with(
            method="GET",
            url="https://jsonplaceholder.typicode.com/posts/1")
        assert response['postId'] == 1

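# A minimal sketch of the Post client exercised by TestPost above, assuming HttpClient
# exposes a request(method=..., url=...) call that returns a requests-style Response.
# The base URL comes from the expected call in the test; the rest is an assumption, not
# the project's actual implementation.
class Post:
    BASE_URL = "https://jsonplaceholder.typicode.com/posts"

    def __init__(self, http_client):
        self.http_client = http_client  # injected HttpClient used for all requests

    def get_posts(self, post_id):
        # Issue a GET for a single post and return the decoded JSON body
        response = self.http_client.request(
            method="GET", url=f"{self.BASE_URL}/{post_id}")
        return response.json()
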
def add_post(topic_id, subtopic_id):
    session = create_session()
    if not session.query(SubTopic).filter(
            SubTopic.id == subtopic_id,
            SubTopic.topic_id == topic_id).first():
        abort(404)
    form = PostForm()
    if form.validate_on_submit():
        new_post = Post(title=form.title.data,
                        description=form.description.data,
                        author_id=current_user.id,
                        lvl_access=form.lvl_access.data,
                        subtopic_id=subtopic_id,
                        published=current_user.role >= 1)
        session.add(new_post)
        session.commit()
        return redirect(
            f'/topic/{topic_id}/subtopic/{subtopic_id}/post/{new_post.id}')
    return render_template('edit_post.html',
                           title='Post creation',
                           form=form,
                           title_form='Create new post')

def setUp(self):
    self.mock_http = MagicMock(spec=HttpClient)
    self.mock_post = Post(self.mock_http)

def request(value):
    statement = """INSERT INTO Request (RequestDate, RequestStatus, RequestType, RentalStartDate, RentalPeriod, UserID, PropertyID)
                   VALUES (?, ?, ?, ?, ?, ?, ?)"""
    token = Post.create(statement=statement, value=value)
    return token

def to_post():
    if request.method == 'POST':
        session.add(Post(request.form['text'], current_user))
        session.commit()
        return redirect('title')
    return render_template('post.html')

def post(self):
    session = create_session()
    args = self.post_parse.parse_args()
    post = Post()
    tour = get_tour(session, args['tournament_id'])
    if not tour.have_permission(current_user):
        abort(403)
    for key, value in args.items():
        if key == 'status':
            if value:
                post.status = 1
                post.now = False
            else:
                post.status = 0
                post.now = True
        else:
            setattr(post, key, value)
    post.author_id = current_user.get_id()
    session.add(post)
    session.commit()
    return jsonify({
        "success": "ok",
        "post_id": post.id,
        "status": post.status,
        "tour_id": post.tournament_id,
        "now": post.now
    })

def send(csv_file):
    # csv_file is a .csv file containing the property list supplied by the admin
    statement = """INSERT INTO BaseProperty (PropertyUID, Price, PropertyType, YearBuilt, TenureType, Bedroom, Bathroom, ExtraRoom, Parking, Size, FloorPlan, Unit, Area, Street, District, State, Postcode, Township, Contract, ContractPeriod, OwnershipID)
                   VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"""
    token = Post.create_multiple(statement=statement, value=csv_file)
    return token

def register_Property(value):
    statement = """INSERT INTO BaseProperty (PropertyUID, Price, PropertyType, YearBuilt, TenureType, Bedroom, Bathroom, ExtraRoom, Parking, Size, FloorPlan, Unit, Area, Street, District, State, Postcode, Township)
                   VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"""
    token = Post.create(statement=statement, value=value)
    return token

def owned(value):
    statement = """INSERT INTO BaseProperty (PropertyUID, Price, PropertyType, YearBuilt, TenureType, Bedroom, Bathroom, ExtraRoom, Parking, Size, FloorPlan, Unit, Area, Street, District, State, Postcode, Township, Contract, ContactPeriod, RentID, OwnershipID, RentPrice, SellPrice, Images, RentalStartDate, RentalEndDate, RentalPeriod, Description, LastUpdatedDate, RentContract, SellContract)
                   VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"""
    token = Post.create(statement=statement, value=value)
    return token

def __init__(self,
             subreddit: str,
             cache_file: str,
             reddit: praw.Reddit,
             hot_ttl: int = 86400,
             new_ttl: int = 21600,
             top_ttl: int = 2592000):
    """
    Creates a cache of posts from a subreddit. Posts will be loaded from a cache file.
    If a cache file cannot be located, an empty file will be created at the specified location.

    :param subreddit: The name of the subreddit the posts are scraped from
    :param cache_file: The path of the cache file
    :param reddit: The praw.Reddit instance used to fetch posts
    :param hot_ttl: The number of seconds before the hot feed is refreshed (Default: 24 hours)
    :param new_ttl: The number of seconds before the new feed is refreshed (Default: 6 hours)
    :param top_ttl: The number of seconds before the top feed is refreshed (Default: 30 days)
    """
    self.subreddit = subreddit  # The name of the subreddit
    self.feed_ages = {  # The time when each feed was last refreshed
        "hot": 0.0,
        "new": 0.0,
        "top": 0.0
    }
    self.ttl = {"hot": hot_ttl, "new": new_ttl, "top": top_ttl}
    self.posts = set()  # The set of all cached posts from the subreddit
    self._cache_file = cache_file  # The filepath of the cache file
    self._reddit = reddit  # The praw.Reddit instance

    # If no cache file is found, create one
    if not os.path.exists(self._cache_file):
        print(
            f"Alert: No cache file found for {subreddit} at '{self._cache_file}'"
        )
        # Create an empty cache file
        with open(self._cache_file, 'w') as fp:
            data = {
                "version": PostCache._CACHE_FORMAT_VERSION,
                "subreddit": self.subreddit,
                "feed_ages": self.feed_ages,
                "posts": []
            }
            json.dump(data, fp)
        print("Created new cache file")
        return

    # If the cache file exists, load it
    print(f"Checking cache for /r/{self.subreddit}")
    with open(self._cache_file, 'r') as fp:
        data = json.load(fp)

    # Validate the cache file
    required_keys = ["version", "subreddit", "feed_ages", "posts"]
    for key in required_keys:  # Check that the cache has all required keys
        if key not in data:
            raise ValueError(f"Cache contains no key '{key}'")
    if not validate_cache_version(PostCache._CACHE_FORMAT_VERSION, data["version"]):
        raise ValueError(
            f"Unsupported cache version: Expected '{PostCache._CACHE_FORMAT_VERSION}', "
            f"was '{data['version']}'")
    if data["subreddit"].lower() != self.subreddit.lower():  # Check the subreddit matches
        raise ValueError(
            f"Cache contains incorrect subreddit: Expected '{self.subreddit.lower()}', "
            f"was '{data['subreddit'].lower()}'")

    # Convert dictionary data to Post objects and store them
    for post_data in data["posts"]:
        post = Post(post_data["postID"], post_data["title"], post_data["score"])
        self.posts.add(post)
    self.feed_ages = data["feed_ages"]

    # Show cache size
    print(f"Cached posts: {len(self.posts)}")

def refresh(self, force: bool = False, limit: int = None):
    """
    Updates the cache for the specified feed(s) if any of the following conditions are true:
    - Feed ttl has expired
    - Cache is being force refreshed

    :param force: If `True`, cache will refresh even if it hasn't expired yet (Default: False)
    :param limit: The feed limit for PRAW. Determines the maximum number of posts PRAW will
        fetch when refreshing the feeds. The lower the number, the faster it will refresh,
        but the less data will be collected. To collect the maximum number (~1000), set to
        `None` (Default: None)
    :return: Returns `True` if the feed was refreshed, `False` otherwise
    """
    curr_time = time.time()
    refreshed = False

    # Create subreddit object
    subreddit = self._reddit.subreddit(self.subreddit)

    # Refresh every feed
    for feed_name in self.feed_ages:
        feed_age = self.feed_ages[feed_name]

        # === Check if feed needs to be refreshed
        if force:
            print(f"Alert: Forcing cache refresh on {feed_name}")
        elif (feed_age + self.ttl[feed_name]) < curr_time:
            print(f"Alert: {feed_name} cache expired for /r/{self.subreddit} - "
                  f"Refreshing feed...")
        else:
            # Cache is still fresh and will not be updated
            continue

        # === Refresh expired feed
        refreshed = True

        # Get post generator
        if feed_name == "hot":
            generator = subreddit.hot(limit=limit)
        elif feed_name == "new":
            generator = subreddit.new(limit=limit)
        else:
            generator = subreddit.top(limit=limit)

        # Fetch posts from feed
        for submission in generator:
            new_post = Post(submission.id, submission.title, submission.score)
            # If post already in set, replace it with the more current score
            self.posts.discard(new_post)
            self.posts.add(new_post)

        # Update cache age
        self.feed_ages[feed_name] = curr_time

    # Show cache size after refresh
    print(f"Cached posts: {len(self.posts)}")
    return refreshed

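# A minimal usage sketch for PostCache, assuming it is constructed as in __init__ above;
# the subreddit name, cache path, and praw credentials are placeholders.
import praw

reddit = praw.Reddit(client_id="CLIENT_ID",
                     client_secret="CLIENT_SECRET",
                     user_agent="post-cache-example")
cache = PostCache("learnpython", "learnpython_cache.json", reddit)
# Refresh any feed whose ttl has expired, fetching at most 100 posts per feed
cache.refresh(limit=100)
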
def create(value):
    statement = """INSERT INTO Billing (IssuedDate, BillingType, BillingAmount, PaymentDate, PropertyID)
                   VALUES (?, ?, ?, ?, ?)"""
    token = Post.create(statement=statement, value=value)
    return token

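# A minimal sketch of the Post.create / Post.create_multiple helpers that the INSERT
# functions above call. It assumes a sqlite3-style DB-API connection with qmark ("?")
# placeholders; the database path "database.db" and the CSV handling are assumptions,
# not the project's actual implementation.
import csv
import sqlite3


class Post:
    DB_PATH = "database.db"  # placeholder path

    @staticmethod
    def create(statement, value):
        # Execute a single parameterized INSERT; return True on success, False on error
        try:
            with sqlite3.connect(Post.DB_PATH) as conn:
                conn.execute(statement, value)
            return True
        except sqlite3.Error as error:
            print(error)
            return False

    @staticmethod
    def create_multiple(statement, value):
        # Execute one INSERT per row; `value` is assumed to be an open .csv file object
        try:
            with sqlite3.connect(Post.DB_PATH) as conn:
                conn.executemany(statement, csv.reader(value))
            return True
        except sqlite3.Error as error:
            print(error)
            return False
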
def __init__(self):
    self.post = Post()