def bump(pk=None):
    """Bump a post: stamp it with the current edit time under the community user.

    When no ``pk`` is given, a random open question is chosen; half the
    time the pool is narrowed to recent (last 10 weeks) unanswered ones.
    """
    from biostar.apps.posts.models import Post
    from biostar.apps.users.models import User
    from common import general_util

    if not pk:
        query = Post.objects.filter(type=Post.QUESTION, status=Post.OPEN)
        # Half of the time prefer recent questions with no answers yet.
        if random.random() > 0.5:
            since = general_util.now() - timedelta(weeks=10)
            query = query.filter(reply_count=0, creation_date__gt=since)
        # flat=True yields plain ids instead of 1-tuples, so no manual unpack.
        ids = list(query.values_list("id", flat=True))
        pk = random.choice(ids)

    # User pk=1 is assumed to be the community account -- TODO confirm.
    community = User.objects.get(pk=1)
    post = Post.objects.get(pk=pk)
    logger.info(post.title)

    if not post.is_toplevel:
        logger.warning("post is not at toplevel")

    post.lastedit_date = general_util.now()
    post.lastedit_user = community
    post.save()
def clean(self):
    """Validate the form; if a parent post id is given, resolve the parent
    and build a PostPreview seeded from it.

    Raises ValidationError when the referenced parent post does not exist.
    """
    cleaned_data = super(LongForm, self).clean()
    parent_post_id = cleaned_data.get("parent_post_id")
    if parent_post_id:
        # Find the parent.
        try:
            parent = Post.objects.get(pk=parent_post_id)
        except ObjectDoesNotExist:
            # BUG FIX: Python-2-only "except X, exc" syntax replaced with
            # the portable form; the bound exception was unused anyway.
            raise ValidationError(
                "Parent post {} does not exist. Perhaps it was deleted request".format(
                    parent_post_id
                )
            )
        post_preview = PostPreview(
            parent_post_id=parent_post_id,
            title=parent.title,
            tag_val=parent.tag_val,
            tag_value=html_util.split_tags(parent.tag_val),
            content=cleaned_data.get('content'),
            type=int(cleaned_data.get('post_type')),
            date=general_util.now())
def time_ago(date):
    """Format a datetime (or parseable date string) as a relative age string."""
    # Rare bug. TODO: Need to investigate why this can happen.
    if not date or date == '*** MISSING ***':
        return ''

    # Accept both datetime objects and parseable date strings.
    if not isinstance(date, datetime):
        date = dateutil.parser.parse(date)

    delta = general_util.now() - date

    if delta < timedelta(minutes=1):
        return 'just now'

    # Pick the coarsest unit that still reads naturally.
    if delta < timedelta(hours=1):
        unit = pluralize(delta.seconds // 60, "minute")
    elif delta < timedelta(days=1):
        unit = pluralize(delta.seconds // 3600, "hour")
    elif delta < timedelta(days=30):
        unit = pluralize(delta.days, "day")
    elif delta < timedelta(days=90):
        unit = pluralize(int(delta.days / 7), "week")
    elif delta < timedelta(days=730):
        unit = pluralize(int(delta.days / 30), "month")
    else:
        unit = '%0.1f years' % (delta.days / 365.0)

    return "%s ago" % unit
def messages():
    """Yield one Message per subscriber; additionally queue a bulk email
    (with a reply token) for subscribers whose subscription type is email.

    Note: reads subs/body/post/author/emails/tokens/email_text/email_html
    from the enclosing scope.
    """
    for sub in subs:
        message = Message(user=sub.user, body=body, sent_at=body.sent_at)

        # collect to a bulk email if the subscription is by email:
        if sub.type in (EMAIL_MESSAGE, ALL_MESSAGES):
            try:
                token = ReplyToken(user=sub.user,
                                   post=post,
                                   token=general_util.make_uuid(8),
                                   date=general_util.now())
                from_email = settings.EMAIL_FROM_PATTERN % (
                    author.pubkey, settings.DEFAULT_FROM_EMAIL)
                from_email = from_email.encode("utf-8")
                reply_to = settings.EMAIL_REPLY_PATTERN % token.token
                subject = settings.EMAIL_REPLY_SUBJECT % body.subject

                # create the email message
                email = mail.EmailMultiAlternatives(
                    subject=subject,
                    body=email_text,
                    from_email=from_email,
                    to=[sub.user.pubkey],
                    headers={'Reply-To': reply_to},
                )
                email.attach_alternative(email_html, "text/html")
                emails.append(email)
                tokens.append(token)
            except Exception as exc:
                # BUG FIX: Python-2-only "except Exception, exc" syntax.
                # This here can crash the post submission hence the catchall
                logger.error(exc)
        yield message
def save(self, *args, **kwargs):
    """Persist the subscription, stamping a missing date on first save."""
    if not self.id:
        # Set the date to current time if missing.
        if not self.date:
            self.date = general_util.now()
    super(Subscription, self).save(*args, **kwargs)
def items(self):
    """Return the newest open top-level posts, delayed by two hours."""
    # Posts younger than the cutoff are withheld from the feed.
    cutoff = general_util.now() - timedelta(hours=2)
    recent = Post.objects.filter(
        type__in=Post.TOP_LEVEL,
        status=Post.OPEN,
        creation_date__lt=cutoff,
    ).order_by('-creation_date')
    return recent[:FEED_COUNT]
def save(self, *args, **kwargs):
    "Actions that need to be performed on every user save."
    if not self.id:
        # Runs only once, upon object creation: assign uuid and login time.
        self.uuid = general_util.make_uuid()
        self.last_login = general_util.now()
    super(User, self).save(*args, **kwargs)
def create(sender, instance, created, *args, **kwargs):
    "Creates a subscription of a user to a post"
    user = instance.author
    root = instance.root
    # exists() is cheaper than count() == 0 (issues a LIMIT 1 query).
    if not Subscription.objects.filter(post=root, user=user).exists():
        sub_type = user.profile.message_prefs
        if sub_type == const.DEFAULT_MESSAGES:
            # Top-level authors get email; repliers get local messages.
            sub_type = const.EMAIL_MESSAGE if instance.is_toplevel else const.LOCAL_MESSAGE
        sub = Subscription(post=root, user=user, type=sub_type)
        sub.date = general_util.now()
        sub.save()
        # Increase the subscription count of the root.
        Post.objects.filter(pk=root.id).update(subs_count=F('subs_count') + 1)
def apply_sort(request, query):
    """Order and time-limit a post queryset from the request's GET params."""
    # Note: the naming here needs to match that in the server_tag.py template tags.
    # Resolve the requested sort order, falling back to the default field.
    sort = request.GET.get('sort', const.POST_SORT_DEFAULT)
    field = const.POST_SORT_MAP.get(sort, "-lastedit_date")
    query = query.order_by("-sticky", field)

    # Resolve the requested time window; 0 means unlimited.
    limit = request.GET.get('limit', const.POST_LIMIT_DEFAULT)
    days = const.POST_LIMIT_MAP.get(limit, 0)
    if days:
        cutoff = general_util.now() - timedelta(days=days)
        query = query.filter(lastedit_date__gt=cutoff)

    return query
def save(self, *args, **kwargs):
    """Sanitize the info field, stamp join/login dates on first save, persist."""
    # Strip disallowed HTML from the info field.
    self.info = bleach.clean(self.info,
                             tags=ALLOWED_TAGS,
                             attributes=ALLOWED_ATTRIBUTES,
                             styles=ALLOWED_STYLES)
    if not self.id:
        # This runs only once upon object creation.
        if not self.date_joined:
            self.date_joined = general_util.now()
        self.last_login = self.date_joined
    super(Profile, self).save(*args, **kwargs)
def get_users(self, sort, limit, q):
    """Return non-banned users, optionally filtered by pubkey substring and
    recent-login window, ordered by the mapped sort key."""
    sort = const.USER_SORT_MAP.get(sort, None)
    days = const.POST_LIMIT_MAP.get(limit, 0)

    if q:
        query = self.filter(pubkey__icontains=q)
    else:
        query = self

    if days:
        delta = general_util.now() - timedelta(days=days)
        # BUG FIX: was `self.filter(...)`, which silently discarded the
        # pubkey search filter applied above.
        query = query.filter(profile__last_login__gt=delta)

    query = query.exclude(
        status=User.BANNED).select_related("profile").order_by(sort)
    return query
def save(self, *args, **kwargs):
    """Normalize derived fields, then persist the post.

    On every save: re-renders the HTML body from content and strips tags
    from the tag field. On first save only (no id yet): fills in type,
    title, status and timestamps, and touches the parent for answers.
    """
    # Sanitize the post body.
    self.html = html_util.parse_html(self.content)

    # Must add tags with instance method. This is just for safety.
    self.tag_val = html_util.strip_tags(self.tag_val)

    # Posts other than a question also carry the same tag
    # (the display name of the post type is appended if absent).
    if self.is_toplevel and self.type != Post.QUESTION:
        required_tag = self.get_type_display()
        if required_tag not in self.tag_val:
            self.tag_val += "," + required_tag

    if not self.id:
        # Set the titles
        if self.parent and not self.title:
            self.title = self.parent.title

        if self.parent and self.parent.type in (Post.ANSWER, Post.COMMENT):
            # Only comments may be added to a parent that is answer or comment.
            self.type = Post.COMMENT

        if self.type is None:
            # Set post type if it was left empty.
            self.type = self.COMMENT if self.parent else self.FORUM

        # This runs only once upon object creation.
        self.title = self.parent.title if self.parent else self.title
        self.lastedit_user = self.author
        self.status = self.status or Post.PENDING
        self.creation_date = self.creation_date or general_util.now()
        self.lastedit_date = self.creation_date

        # Set the timestamps on the parent so new answers count as
        # activity on the thread (listings sort by lastedit_date).
        if self.type == Post.ANSWER and self.parent:
            self.parent.lastedit_date = self.lastedit_date
            self.parent.lastedit_user = self.lastedit_user
            self.parent.save()

    # Recompute post reply count
    self.update_reply_count()

    super(Post, self).save(*args, **kwargs)
.format(parent_post_id)) post_preview = PostPreview(parent_post_id=parent_post_id, title=parent.title, tag_val=parent.tag_val, tag_value=html_util.split_tags( parent.tag_val), content=cleaned_data.get('content'), type=int(cleaned_data.get('post_type')), date=general_util.now()) else: post_preview = PostPreview(title=cleaned_data.get('title'), content=cleaned_data.get('content'), tag_val=cleaned_data.get('tag_val'), type=int(cleaned_data.get('post_type')), date=general_util.now()) try: serialized = post_preview.serialize_memo() except json_util.MemoTooLarge as err: raise ValidationError( ('%(msg)s. ' 'Sorry, we are not going to be able to fit this into a lightning payment memo.' ), code='too_big_serialized', params={ 'max_size': settings.MAX_MEMO_SIZE, 'msg': "{0}".format(err) }, )
def age_in_days(self):
    """Whole days elapsed since this object's creation date."""
    return (general_util.now() - self.creation_date).days
def run_one_node(self, node):
    """Poll one Lightning node for invoices past its checkpoint and process
    every settled invoice (votes, bounties, new posts), then advance the
    node's global checkpoint past the contiguous prefix of handled invoices.

    Returns the wall-clock processing time in seconds.
    """
    start_time = time.time()
    invoices_details = lnclient.listinvoices(
        index_offset=node.global_checkpoint,
        rpcserver=node.rpcserver,
        mock=settings.MOCK_LN_CLIENT)

    if node not in self.all_invoices_from_db:
        invoice_list_from_db = {}
        logger.info("DB has no invoices for this node")
    else:
        invoice_list_from_db = self.all_invoices_from_db[node]

    # example of invoices_details: {"invoices": [], 'first_index_offset': '5', 'last_index_offset': '72'}
    invoice_list_from_node = invoices_details['invoices']
    self.invoice_count_from_nodes[node] = len(invoice_list_from_node)

    if settings.MOCK_LN_CLIENT:
        # Here the mock pulls invoices from DB Invoice model, while in prod invoices are pulled from the Lightning node
        # 1. Mocked lnclient.listinvoices returns an empty list
        # 2. The web front end adds the InvoiceRequest to the DB before it creates the actual invoices with lnclient.addinvoice
        # 3. Mocked API lnclient.addinvoice simply fakes converting InvoiceRequest to Invoice and saves to DB
        # 4. Here the mocked process_tasks pulls invoices from DB Invoice model and pretends they came from lnclient.listinvoices
        # 5. After X seconds passed based on Invoice created time, here Mock updates the Invoice checkpoint to "done" faking a payment
        invoice_list_from_node = []
        for invoice_obj in Invoice.objects.filter(
                lightning_node=node, checkpoint_value="no_checkpoint"):
            invoice_request = InvoiceRequest.objects.get(
                id=invoice_obj.invoice_request.id)
            if invoice_request.lightning_node.id != node.id:
                continue
            # Pretend the invoice settles three seconds after creation.
            mock_setteled = (invoice_obj.created + timedelta(seconds=3) <
                             timezone.now())
            creation_unixtime = int(
                time.mktime(invoice_obj.created.timetuple()))
            invoice_list_from_node.append({
                "settled": mock_setteled,
                "settle_date": str(int(time.time())) if mock_setteled else 0,
                "state": "SETTLED" if mock_setteled else "OPEN",
                "memo": invoice_request.memo,
                "add_index": invoice_obj.add_index,
                "payment_request": invoice_obj.pay_req,
                "pay_req": invoice_obj.pay_req,  # Old format
                "r_hash": invoice_obj.r_hash,
                "creation_date": str(creation_unixtime),
                "expiry": str(creation_unixtime + 120)
            })

    # add_index -> "needs retry later"; False means it may advance the checkpoint.
    retry_mini_map = {
        int(invoice['add_index']): False
        for invoice in invoice_list_from_node
    }

    one_hour_ago = timezone.now() - timedelta(hours=1)
    recent_invoices = [
        i.id for i in invoice_list_from_db.values() if i.modified > one_hour_ago
    ]
    if len(recent_invoices) == 0:
        logger.info("invoice_list_from_db is empty")
    else:
        logger.info(
            "Recent invoice_list_from_db was: {}".format(recent_invoices))

    for raw_invoice in invoice_list_from_node:
        # Example of raw_invoice:
        # {
        #  'htlcs': [],
        #  'settled': False,
        #  'add_index': '5',
        #  'value': '1',
        #  'memo': '',
        #  'cltv_expiry': '40', 'description_hash': None, 'route_hints': [],
        #  'r_hash': '+fw...=', 'settle_date': '0', 'private': False, 'expiry': '3600',
        #  'creation_date': '1574459849',
        #  'amt_paid': '0', 'features': {}, 'state': 'OPEN', 'amt_paid_sat': '0',
        #  'value_msat': '1000', 'settle_index': '0',
        #  'amt_paid_msat': '0', 'r_preimage': 'd...=', 'fallback_addr': '',
        #  'payment_request': 'lnbc...'
        # }
        created = general_util.unixtime_to_datetime(
            int(raw_invoice["creation_date"]))
        if created < general_util.now() - settings.INVOICE_RETENTION:
            logger.info(
                "Got old invoice from listinvoices, skipping... {} is older then retention {}"
                .format(created, settings.INVOICE_RETENTION))
            continue

        add_index_from_node = int(raw_invoice["add_index"])
        invoice = invoice_list_from_db.get(add_index_from_node)
        if invoice is None:
            logger.error(
                "Unknown add_index {}".format(add_index_from_node))
            logger.error(
                "Raw invoice from node was: {}".format(raw_invoice))
            if raw_invoice['state'] == "CANCELED":
                logger.error("Skipping because invoice is cancelled...")
                retry_mini_map[
                    add_index_from_node] = False  # advance global checkpoint
            else:
                retry_mini_map[
                    add_index_from_node] = True  # try again later
            continue

        # Validate that node data matches what the DB recorded.
        if invoice.invoice_request.memo != raw_invoice["memo"]:
            logger.error(
                "Memo in DB does not match the one in invoice request: db=({}) invoice_request=({})"
                .format(invoice.invoice_request.memo, raw_invoice["memo"]))
            retry_mini_map[add_index_from_node] = True  # try again later
            continue
        if invoice.pay_req != raw_invoice["payment_request"]:
            logger.error(
                "Payment request does not match the one in invoice request: db=({}) invoice_request=({})"
                .format(invoice.pay_req, raw_invoice["payment_request"]))
            retry_mini_map[add_index_from_node] = True  # try again later
            continue

        checkpoint_helper = CheckpointHelper(
            node=node,
            invoice=invoice,
            creation_date=raw_invoice["creation_date"])

        if checkpoint_helper.is_checkpointed():
            continue

        if raw_invoice['state'] == 'CANCELED':
            checkpoint_helper.set_checkpoint("canceled")
            continue

        if raw_invoice['settled'] and (raw_invoice['state'] != 'SETTLED' or
                                       int(raw_invoice['settle_date']) == 0):
            checkpoint_helper.set_checkpoint("inconsistent")
            continue

        if time.time() > int(raw_invoice['creation_date']) + int(
                raw_invoice['expiry']):
            checkpoint_helper.set_checkpoint("expired")
            continue

        if not raw_invoice['settled']:
            logger.info("Skipping invoice at {}: Not yet settled".format(
                checkpoint_helper))
            retry_mini_map[
                checkpoint_helper.add_index] = True  # try again later
            continue

        #
        # Invoice is settled
        #
        logger.info(
            "Processing invoice at {}: SETTLED".format(checkpoint_helper))

        memo = raw_invoice["memo"]
        try:
            action_details = json_util.deserialize_memo(memo)
        except json_util.JsonUtilException:
            checkpoint_helper.set_checkpoint("deserialize_failure")
            continue

        try:
            validators.validate_memo(action_details)
        except ValidationError as e:
            logger.exception(e)
            checkpoint_helper.set_checkpoint("memo_invalid")
            continue

        action = action_details.get("action")
        if action:
            if action in ["Upvote", "Accept"]:
                vote_type = Vote.VOTE_TYPE_MAP[action]
                change = settings.PAYMENT_AMOUNT
                post_id = action_details["post_id"]
                try:
                    post = Post.objects.get(pk=post_id)
                except (ObjectDoesNotExist, ValueError):
                    logger.error(
                        "Skipping vote. The post for vote does not exist: {}"
                        .format(action_details))
                    checkpoint_helper.set_checkpoint("invalid_post")
                    continue

                user = get_anon_user()
                logger.info(
                    "Creating a new vote: author={}, post={}, type={}".format(
                        user, post, vote_type))
                vote = Vote.objects.create(author=user,
                                           post=post,
                                           type=vote_type)

                # Update user reputation
                # TODO: refactor score logic to be shared with "mark_fake_test_data.py"
                User.objects.filter(pk=post.author.id).update(
                    score=F('score') + change)

                # The thread score represents all votes in a thread
                Post.objects.filter(pk=post.root_id).update(
                    thread_score=F('thread_score') + change)

                if vote_type == Vote.ACCEPT:
                    if "sig" not in action_details:
                        checkpoint_helper.set_checkpoint("sig_missing")
                        continue
                    sig = action_details.pop("sig")
                    sig = validators.pre_validate_signature(sig)
                    # Only the question author may accept: verify the
                    # signature against the parent post author's pubkey.
                    verifymessage_detail = lnclient.verifymessage(
                        msg=json.dumps(action_details, sort_keys=True),
                        sig=sig,
                        rpcserver=node.rpcserver,
                        mock=settings.MOCK_LN_CLIENT)
                    if not verifymessage_detail["valid"]:
                        checkpoint_helper.set_checkpoint("invalid_signiture")
                        continue
                    if verifymessage_detail[
                            "pubkey"] != post.parent.author.pubkey:
                        checkpoint_helper.set_checkpoint(
                            "signiture_unauthorized")
                        continue

                    if change > 0:
                        # First, un-accept all answers
                        for answer in Post.objects.filter(
                                parent=post.parent, type=Post.ANSWER):
                            if answer.has_accepted:
                                Post.objects.filter(pk=answer.id).update(
                                    vote_count=F('vote_count') - change,
                                    has_accepted=False)
                        # There does not seem to be a negation operator for F objects.
                        Post.objects.filter(pk=post.id).update(
                            vote_count=F('vote_count') + change,
                            has_accepted=True)
                        Post.objects.filter(pk=post.root_id).update(
                            has_accepted=True)
                    else:
                        # TODO: change "change": here change is set to payment amount, so does not make sense to be called change
                        # TODO: detect un-accept attempt and raise "Un-accept not yet supported"
                        # BUG FIX: was `raise Exeption(...)` -- a NameError.
                        raise Exception(
                            "Payment ammount has to be positive")
                else:
                    Post.objects.filter(pk=post.id).update(
                        vote_count=F('vote_count') + change)

                # Upvote on an Answer is the trigger for potential bounty awards
                if post.type == Post.ANSWER and post.author != get_anon_user():
                    award_bounty(question_post=post.parent)

                checkpoint_helper.set_checkpoint("done",
                                                 action_type="upvote",
                                                 action_id=post.id)

            elif action == "Bounty":
                valid = True
                for keyword in ["post_id", "amt"]:
                    if keyword not in action_details:
                        logger.warn(
                            "Bounty invalid because {} is missing".format(
                                keyword))
                        valid = False
                if not valid:
                    logger.warn("Could not start Bounty: bounty_invalid")
                    checkpoint_helper.set_checkpoint("bounty_invalid")
                    continue

                post_id = action_details["post_id"]
                amt = action_details["amt"]
                try:
                    post_obj = Post.objects.get(pk=post_id)
                except (ObjectDoesNotExist, ValueError):
                    logger.error(
                        "Bounty invalid because post {} does not exist".format(
                            post_id))
                    checkpoint_helper.set_checkpoint(
                        "bounty_invalid_post_does_not_exist")
                    continue

                logger.info("Starting bounty for post {}!".format(post_id))
                new_b = Bounty(
                    post_id=post_obj,
                    amt=amt,
                    activation_time=timezone.now(),
                )
                new_b.save()
                checkpoint_helper.set_checkpoint("done",
                                                 action_type="bonty",
                                                 action_id=post_id)

            else:
                logger.error("Invalid action: {}".format(action_details))
                checkpoint_helper.set_checkpoint("invalid_action")
                continue
        else:
            # Posts do not include the "action" key to save on memo space
            logger.info("Action details {}".format(action_details))

            if "sig" in action_details:
                sig = action_details.pop("sig")
                sig = validators.pre_validate_signature(sig)
                verifymessage_detail = lnclient.verifymessage(
                    msg=json.dumps(action_details, sort_keys=True),
                    sig=sig,
                    rpcserver=node.rpcserver,
                    mock=settings.MOCK_LN_CLIENT)
                if not verifymessage_detail["valid"]:
                    checkpoint_helper.set_checkpoint("invalid_signiture")
                    continue
                pubkey = verifymessage_detail["pubkey"]
            else:
                pubkey = "Unknown"

            if "parent_post_id" in action_details:
                # Find the parent.
                try:
                    parent_post_id = int(action_details["parent_post_id"])
                    parent = Post.objects.get(pk=parent_post_id)
                except (ObjectDoesNotExist, ValueError):
                    logger.error(
                        "The post parent does not exist: {}".format(
                            action_details))
                    checkpoint_helper.set_checkpoint("invalid_parent_post")
                    continue
                title = parent.title
                tag_val = parent.tag_val
            else:
                title = action_details["title"]
                tag_val = action_details["tag_val"]
                parent = None

            user, created = User.objects.get_or_create(pubkey=pubkey)
            post = Post(
                author=user,
                parent=parent,
                type=action_details["post_type"],
                title=title,
                content=action_details["content"],
                tag_val=tag_val,
            )
            # TODO: Catch failures when post title is duplicate (e.g. another node already saved post)
            post.save()

            # New Answer is the trigger for potential bounty awards
            if post.type == Post.ANSWER and user != get_anon_user():
                award_bounty(question_post=post.parent)

            # Save tags
            if "tag_val" in action_details:
                tags = action_details["tag_val"].split(",")
                for tag in tags:
                    tag_obj, created = Tag.objects.get_or_create(name=tag)
                    if created:
                        logger.info("Created a new tag: {}".format(tag))
                    tag_obj.count += 1
                    post.tag_set.add(tag_obj)
                    tag_obj.save()
                post.save()

            checkpoint_helper.set_checkpoint("done",
                                             action_type="post",
                                             action_id=post.id)

    # advance global checkpoint: move it over the contiguous prefix of
    # add_indexes that no longer need a retry.
    new_global_checkpoint = None
    for add_index in sorted(retry_mini_map.keys()):
        retry = retry_mini_map[add_index]
        if retry:
            break
        else:
            logger.info("add_index={} advances global checkpoint".format(
                add_index))
            new_global_checkpoint = add_index

    if new_global_checkpoint:
        node.global_checkpoint = new_global_checkpoint
        node.save()
        logger.info(
            "Saved new global checkpoint {}".format(new_global_checkpoint))

    processing_wall_time = time.time() - start_time
    logger.info("Processing node {} took {:.3f} seconds".format(
        node.node_name, processing_wall_time))
    return processing_wall_time
def post(self, request, *args, **kwargs):
    """Apply the moderation action from the form (bump, accept, move,
    open/close, crosspost, duplicate, delete) to the selected post and
    redirect back to the post's thread."""
    post = self.get_obj()
    post = post_permissions(request, post)

    # BUG FIX: `user` was referenced below (CROSSPOST/DUPLICATE comments,
    # DELETE permission check) but never assigned, raising NameError.
    user = request.user

    # The default return url
    response = HttpResponseRedirect(post.root.get_absolute_url())

    if not post.is_editable:
        logger.warning(
            "TODO: You may not moderate this post, (Request: %s)", request)
        return response

    # Initialize the form class.
    form = self.form_class(request.POST, pk=post.id)

    # Bail out on errors.
    if not form.is_valid():
        logger.error("%s, %s", form.errors, request)
        return response

    # A shortcut to the clean form data.
    get = form.cleaned_data.get

    # These will be used in updates, will bypasses signals.
    query = Post.objects.filter(pk=post.id)
    root = Post.objects.filter(pk=post.root_id)

    action = get('action')

    if action == BUMP_POST and post != post.root:
        logger.error("Only top-level posts may be bumped! (Request: %s)",
                     request)
        return response

    if action == BUMP_POST:
        Post.objects.filter(id=post.id).update(
            lastedit_date=general_util.now())
        logger.info("Post bumped (Request: %s)", request)
        return response

    # NOTE(review): this comparison is always False (scalar vs. tuple).
    # Intent was probably `action in (OPEN, TOGGLE_ACCEPT) and <caller is
    # not a moderator>`; left unchanged because a plain `in` here would
    # make the OPEN/TOGGLE_ACCEPT handlers below unreachable. Confirm intent.
    if action == (OPEN, TOGGLE_ACCEPT):
        logger.error(
            "Only a moderator may open or toggle a post (Request: %s)",
            request)
        return response

    if action == TOGGLE_ACCEPT and post.type == Post.ANSWER:
        # Toggle post acceptance.
        post.has_accepted = not post.has_accepted
        post.save()
        has_accepted = Post.objects.filter(root=post.root,
                                           type=Post.ANSWER,
                                           has_accepted=True).count()
        root.update(has_accepted=has_accepted)
        return response

    if action == MOVE_TO_ANSWER and post.type == Post.COMMENT:
        # This is a valid action only for comments.
        logger.info("Moved post to answer (Request: %s)", request)
        query.update(type=Post.ANSWER, parent=post.root)
        root.update(reply_count=F("reply_count") + 1)
        return response

    if action == MOVE_TO_COMMENT and post.type == Post.ANSWER:
        # This is a valid action only for answers.
        logger.info("Moved post to answer (Request: %s)", request)
        query.update(type=Post.COMMENT, parent=post.root)
        root.update(reply_count=F("reply_count") - 1)
        return response

    # Some actions are valid on top level posts only.
    if action in (CLOSE_OFFTOPIC, DUPLICATE) and not post.is_toplevel:
        logger.warning(
            "You can only close or open a top level post (Request: %s)",
            request)
        return response

    if action == OPEN:
        query.update(status=Post.OPEN)
        logger.info("Opened post: %s (Request: %s)", post.title, request)
        return response

    # BUG FIX: was `action in CLOSE_OFFTOPIC` -- a membership test against a
    # scalar constant (cf. `action in (CLOSE_OFFTOPIC, DUPLICATE)` above).
    if action == CLOSE_OFFTOPIC:
        query.update(status=Post.CLOSED)
        logger.info("Closed post: %s (Request: %s)", post.title, request)
        content = html_util.render(name="messages/offtopic_posts.html",
                                   user=post.author,
                                   comment=get("comment"),
                                   post=post)
        comment = Post(content=content, type=Post.COMMENT, parent=post)
        comment.save()
        return response

    if action == CROSSPOST:
        content = html_util.render(name="messages/crossposted.html",
                                   user=post.author,
                                   comment=get("comment"),
                                   post=post)
        comment = Post(content=content,
                       type=Post.COMMENT,
                       parent=post,
                       author=user)
        comment.save()
        return response

    if action == DUPLICATE:
        query.update(status=Post.CLOSED)
        posts = Post.objects.filter(id__in=get("dupe"))
        content = html_util.render(name="messages/duplicate_posts.html",
                                   user=post.author,
                                   comment=get("comment"),
                                   posts=posts)
        comment = Post(content=content,
                       type=Post.COMMENT,
                       parent=post,
                       author=user)
        comment.save()
        return response

    if action == DELETE:
        # Delete marks a post deleted but does not remove it.
        # Remove means to delete the post from the database with no trace.
        # Posts with children or older than some value can only be deleted not removed

        # The children of a post.
        children = Post.objects.filter(parent_id=post.id).exclude(pk=post.id)

        # The condition where post can only be deleted.
        delete_only = children or post.age_in_days > 7 or post.vote_count > 1 or (
            post.author != user)

        if delete_only:
            # Deleted posts can be undeleted by re-opening them.
            query.update(status=Post.DELETED)
            logger.info("Deleted post: %s (Request: %s)", post.title, request)
            response = HttpResponseRedirect(post.root.get_absolute_url())
        else:
            # This will remove the post. Redirect depends on the level of the post.
            url = "/" if post.is_toplevel else post.parent.get_absolute_url()
            post.delete()
            logger.info("Removed post: %s (Request: %s)", post.title, request)
            response = HttpResponseRedirect(url)

        # Recompute post reply count
        post.update_reply_count()
        return response

    # By this time all actions should have been performed
    logger.warning(
        "That seems to be an invalid action for that post. \
It is probably ok! Actions may be shown even when not valid. (Request: %s)",
        request)
    return response
class NewAnswer(FormView):
    """ Creates a new post. """
    form_class = ShortForm
    template_name = "post_edit.html"
    type_map = dict(answer=Post.ANSWER, comment=Post.COMMENT)
    post_type = None

    def get_context_data(self, **kwargs):
        context = super(NewAnswer, self).get_context_data(**kwargs)
        context['nodes_list'] = [n["node_name"] for n in ln.get_nodes_list()]
        return context

    def post(self, request, *args, **kwargs):
        """
        This gets the initial "new answer" request, before the get method
        if everything looks good then we generate the memo (with parent post id)
        and re-direct to preview. If there are errors, we also render this back
        to the user directly from here. See `if not form.is_valid()`
        """
        parent_post_id = int(self.kwargs['pid'])

        # URL sets the type for this new post
        post_type = self.type_map.get(self.post_type)
        assert post_type in [
            Post.ANSWER, Post.COMMENT
        ], "I only support Answers and Comment types, got: {}".format(
            post_type)

        # Find the parent.
        try:
            parent = Post.objects.get(pk=parent_post_id)
        except ObjectDoesNotExist:
            # BUG FIX: Python-2-only "except X, exc" syntax replaced with
            # the portable form; the bound exception was unused.
            logger.error(
                "The post does not exist. Perhaps it was deleted request (Request: %s)",
                request)
            raise

        # Validating the form.
        form = self.form_class(request.POST)
        if not form.is_valid():
            try:
                context = self.get_context_data(**kwargs)
            except Exception as e:
                logger.exception(e)
                raise
            context["form"] = form
            context["errors_detected"] = True
            return render(request, self.template_name, context)

        # Valid forms start here
        content = form.cleaned_data.get("content")

        post_preview = PostPreview()
        post_preview.parent_post_id = parent_post_id
        post_preview.title = parent.title
        post_preview.tag_val = parent.tag_val
        post_preview.tag_value = html_util.split_tags(parent.tag_val)
        post_preview.status = Post.OPEN
        post_preview.type = post_type
        post_preview.content = content
        post_preview.html = html_util.parse_html(content)
        post_preview.date = general_util.now()
        post_preview.memo = post_preview.serialize_memo()
        post_preview.clean_fields()

        return HttpResponseRedirect(
            post_preview.get_absolute_url(memo=post_preview.memo))
class NewPost(FormView):
    """Create a new top-level post (or an answer/comment when a memo with a
    parent_post_id is supplied)."""
    form_class = LongForm
    template_name = "post_edit.html"

    def get_context_data(self, **kwargs):
        context = super(NewPost, self).get_context_data(**kwargs)
        context['nodes_list'] = [n["node_name"] for n in ln.get_nodes_list()]
        return context

    def get(self, request, *args, **kwargs):
        initial = dict()

        if "memo" in kwargs:
            # Prefill the form from a previously serialized memo.
            memo = json_util.deserialize_memo(kwargs["memo"])
            if "parent_post_id" in memo:
                expected_memo_keys = ["parent_post_id", "post_type", "content"]
                self.form_class = ShortForm
            else:
                expected_memo_keys = [
                    "title", "post_type", "tag_val", "content"
                ]
            for key in expected_memo_keys:
                initial[key] = memo[key]
        else:
            # Attempt to prefill from GET parameters
            for key in "title post_type tag_val content".split():
                value = request.GET.get(key)
                if value:
                    initial[key] = value

        try:
            context = self.get_context_data(**kwargs)
        except Exception as e:
            logger.exception(e)
            raise
        context['form'] = self.form_class(initial=initial)
        context['errors_detected'] = False
        return render(request, self.template_name, context)

    def post(self, request, *args, **kwargs):
        if "memo" in kwargs:
            post_preview = PostPreview()

            # Some data comes from memo
            memo = json_util.deserialize_memo(kwargs["memo"])

            if "parent_post_id" in memo:
                self.form_class = ShortForm
                parent_post_id = memo["parent_post_id"]
                # Find the parent.
                try:
                    parent = Post.objects.get(pk=parent_post_id)
                except ObjectDoesNotExist:
                    # BUG FIX: Python-2-only "except X, e" syntax, and the
                    # message used "%s" with str.format so the request was
                    # never substituted into the text.
                    msg = "The post does not exist. Perhaps it was deleted request (Request: {})".format(
                        request)
                    logger.error(msg)
                    raise PostViewException(msg)
                post_preview.parent_post_id = parent_post_id
                post_preview.title = parent.title
                post_preview.tag_val = parent.tag_val
                post_preview.tag_value = html_util.split_tags(parent.tag_val)
            else:
                post_preview.title = memo["title"]
                post_preview.tag_val = memo["tag_val"]
                post_preview.tag_value = html_util.split_tags(memo["tag_val"])

            post_preview.status = Post.OPEN
            post_preview.type = memo["post_type"]
            post_preview.date = datetime.utcfromtimestamp(
                memo["unixtime"]).replace(tzinfo=utc)

        # Validating the form.
        form = self.form_class(request.POST)
        if not form.is_valid():
            try:
                context = self.get_context_data(**kwargs)
            except Exception as e:
                logger.exception(e)
                raise
            context['form'] = form
            context['errors_detected'] = True
            return render(request, self.template_name, context)

        if "memo" in kwargs:
            # Only new data comes from the HTML form
            post_preview.content = form.cleaned_data.get("content")
            post_preview.html = html_util.parse_html(post_preview.content)
        else:
            # Valid forms start here.
            data = form.cleaned_data
            # All data comes from the HTML form
            post_preview = PostPreview(title=data.get('title'),
                                       content=data.get('content'),
                                       tag_val=data.get('tag_val'),
                                       type=int(data.get('post_type')),
                                       date=general_util.now())

        return HttpResponseRedirect(
            post_preview.get_absolute_url(memo=post_preview.serialize_memo()))
def rising_star(user):
    """Badge condition: recently-joined user with a substantial post count."""
    # The user joined recently -- within 15 weeks (~3.5 months).
    # NOTE(review): the original comment said "three months" but the code
    # checks weeks=15; the code is taken as authoritative here.
    cond = general_util.now() < user.profile.date_joined + timedelta(weeks=15)
    # ... and has authored more than 50 real (non-fake) posts.
    cond = cond and Post.objects.filter(author=user, is_fake_test_data=False).count() > 50
    return wrap_list(user, cond)
def save(self, **kwargs):
    """Persist the message body, truncating the subject and stamping sent_at."""
    # Enforce the maximum subject length.
    self.subject = self.subject[:self.MAX_SIZE]
    # Default the sent time to now when not already set.
    if not self.sent_at:
        self.sent_at = general_util.now()
    super(MessageBody, self).save(**kwargs)