def api_get_claim_by_theme(request):
    forum = Forum.objects.get(id=request.session['forum_id'])
    response = {}
    context = {}
    theme_id = int(request.REQUEST.get('theme_id'))
    claim_category = request.REQUEST.get('claim_category')
    if theme_id > 0:
        claims = Claim.objects.filter(theme_id=theme_id,
                                      claim_category=claim_category)
    else:
        claims = Claim.objects.filter(forum=forum,
                                      claim_category=claim_category)
    context["claims"] = []
    for claim in claims:
        item = {}
        item['date'] = utils.pretty_date(claim.updated_at)
        item['created_at'] = utils.pretty_date(claim.created_at)
        item['created_at_used_for_sort'] = claim.created_at
        item['content'] = unicode(ClaimVersion.objects.filter(claim_id=claim.id)[0])
        item['id'] = claim.id
        item['author_name'] = claim.author.first_name + " " + claim.author.last_name
        item['is_author'] = (request.user == claim.author)
        item['highlight_ids'] = ""
        for highlight in claim.source_highlights.all():
            item['highlight_ids'] += str(highlight.id) + " "
        # strip() returns a new string, so the result must be assigned back
        item['highlight_ids'] = item['highlight_ids'].strip(" ")
        context["claims"].append(item)
    context['claims'].sort(key=lambda x: x["created_at_used_for_sort"], reverse=True)
    response['workbench_claims'] = render_to_string("workbench-claims.html", context)
    return HttpResponse(json.dumps(response), mimetype='application/json')
def api_load_claim_list_partial(request):
    response = {}
    context = {}
    context['highlights'] = []
    highlight_id = request.REQUEST.get("highlight_id")
    highlightClaims = HighlightClaim.objects.filter(highlight_id=highlight_id)
    context["claims"] = []
    for highlightClaim in highlightClaims:
        claim = highlightClaim.claim
        item = {}
        item['date'] = utils.pretty_date(claim.updated_at)
        item['content'] = unicode(ClaimVersion.objects.filter(claim_id=claim.id)[0]) + \
            " (" + claim.claim_category + ")"
        item['id'] = claim.id
        item['author_name'] = claim.author.first_name + " " + claim.author.last_name
        item['is_author'] = (request.user == claim.author)
        item['highlight_ids'] = ""
        for highlight in claim.source_highlights.all():
            item['highlight_ids'] += str(highlight.id) + " "
        # strip() returns a new string, so the result must be assigned back
        item['highlight_ids'] = item['highlight_ids'].strip(" ")
        context["claims"].append(item)
    response['workbench_claims'] = render_to_string("workbench-claims.html", context)
    return HttpResponse(json.dumps(response), mimetype='application/json')
def getAttr(self):
    attr = {}
    attr['created_at'] = time.mktime(self.created_at.timetuple())
    attr['created_at_pretty'] = utils.pretty_date(self.created_at)
    attr['id'] = self.id
    attr['start'] = self.start_pos
    attr['end'] = self.end_pos
    attr['author_id'] = self.author.id
    attr['context_id'] = self.context.id
    attr['text'] = self.text
    attr['is_nugget'] = self.is_nugget
    attr['is_used'] = HighlightClaim.objects.filter(highlight_id=self.id).count() > 0
    attr['author_name'] = self.author.first_name + " " + self.author.last_name
    attr['theme'] = ""
    attr['theme_id'] = ""
    try:
        tag = Tag.objects.get(highlight_ptr=self)
        attr['content'] = tag.content
        attr['type'] = 'tag'
    except:
        # type of the first entry under this highlight; claim has priority
        if self.posts_of_highlight.exists():
            attr['type'] = self.posts_of_highlight.order_by('updated_at')[0].content_type
        else:
            # ghost highlight with no entries attached
            attr['type'] = 'claim'
    return attr
def getExcerpt(self, forum):
    attr = {}  # for efficiency, don't inherit at all
    attr['version_id'] = self.id
    attr['updated_at_full'] = self.updated_at
    attr['updated_at'] = utils.pretty_date(self.updated_at)
    attr['excerpt'] = self.content[:50] + '...'
    return attr
def getAttr(self, forum):
    attr = {}
    attr['id'] = self.id
    attr['title'] = self.title
    attr['segmented_text'] = utils.segment_text(self.content)
    attr['updated_at'] = utils.pretty_date(self.updated_at)
    attr['updated_at_full'] = self.updated_at
    return attr
def getAttrAdmin(self):
    attr = {}
    attr['id'] = self.id
    attr['title'] = self.title
    attr['author_name'] = self.author.get_full_name()
    attr['content'] = self.content
    attr['updated_at'] = utils.pretty_date(self.updated_at)
    attr['updated_at_full'] = self.updated_at
    attr['order'] = self.order
    return attr
def getAttr(self):
    attr = {}
    attr['id'] = self.id
    attr['author_id'] = self.author.id
    attr['author_name'] = self.author.get_full_name()
    attr['author_intro'] = UserInfo.objects.get(user=self.author).description
    attr['text'] = self.text
    attr['created_at_full'] = self.created_at  # for sorting
    attr['created_at_pretty'] = utils.pretty_date(self.created_at)
    return attr
def get(self):
    q = PullRequest.all()
    q.order("last_updated")
    # This is the request that wasn't updated for the longest time:
    p = q.get()
    if p is None:
        last_update = None
        last_update_pretty = "never"
    else:
        last_update = p.last_updated
        last_update_pretty = pretty_date(last_update)

    q = PullRequest.all()
    q.filter("state =", "open")
    q.order("last_updated")
    # This is the open request that wasn't updated for the longest time:
    p = q.get()
    if p is None:
        last_quick_update = None
        last_quick_update_pretty = "never"
    else:
        last_quick_update = p.last_updated
        last_quick_update_pretty = pretty_date(last_quick_update)

    p_mergeable = PullRequest.all()
    p_mergeable.filter("mergeable =", True)
    p_mergeable.filter("state =", "open")
    p_mergeable.order("-created_at")

    p_nonmergeable = PullRequest.all()
    p_nonmergeable.filter("mergeable =", False)
    p_nonmergeable.filter("state =", "open")
    p_nonmergeable.order("-created_at")

    self.render(
        "index.html",
        {
            "pullrequests_mergeable": p_mergeable,
            "pullrequests_nonmergeable": p_nonmergeable,
            "last_update": last_update,
            "last_update_pretty": last_update_pretty,
            "last_quick_update": last_quick_update,
            "last_quick_update_pretty": last_quick_update_pretty,
        },
    )
def to_form(self):
    status_list = ['Won', 'Tied', 'Lost', 'Forfeit']
    host_status = status_list[self.host_result]
    oppoent_status = status_list[self.oppoent_result]
    date_end = pretty_date(self.end_date)
    return ScoreForm(host_name=self.host.get().name,
                     host_result=host_status,
                     oppoent_name=self.oppoent.get().name,
                     oppoent_result=oppoent_status,
                     end_date=date_end,
                     game=self.game.urlsafe())
def get(self): guestbook_name = self.request.get("guestbook_name") greetings_query = Greeting.all().ancestor(guestbook_key(guestbook_name)).order("-date") greetings = greetings_query.fetch(10) # Set the pretty date. for greeting in greetings: greeting.pretty_date = pretty_date(greeting.date) template_values = {"greetings": greetings} path = os.path.join(os.path.dirname(__file__), "index.html") self.response.out.write(template.render(path, template_values))
def to_form(self): """Returns a GameForm representation of the Game""" status_list = ['Active', 'Completed'] status = status_list[self.status] date_start = pretty_date(self.start_date) form = GameForm(urlsafe_key = self.key.urlsafe(), setup = str(self.setup), host_name = self.host, oppoent_name = self.oppoent, turn = self.turn.get().name, status = status, start_date = date_start) return form
def getAttr(self):
    attr = {}
    try:
        attr['role'] = Role.objects.get(user=self.source, forum=self.forum).role
    except:
        attr['role'] = VISITOR_ROLE
    attr['id'] = self.id
    attr['user_id'] = self.source.id
    attr['user_name'] = self.source.get_full_name()
    attr['content'] = self.content
    attr['created_at'] = utils.pretty_date(self.created_at)
    return attr
def getAttr(self):
    attr = {}
    attr['id'] = self.id
    attr['title'] = self.title
    attr['description'] = self.description
    attr['num_sections'] = self.sections.count()
    try:
        # update time is the latest updated section in it
        update_time = self.sections.order_by('-updated_at')[0].updated_at
        attr['updated_at'] = utils.pretty_date(update_time)
        attr['updated_at_full'] = update_time
    except:
        pass
    return attr
def archive_list():
    keyword = request.args.get("keyword", "")
    db = DB()
    if keyword:
        session_list = db._select2dic("scraper_archive",
                                      where_values=[keyword],
                                      order="created desc")
    else:
        session_list = db._select2dic("scraper_archive", order="created desc")
    data = []
    for each in session_list:
        each['created'] = pretty_date(int(each['created']))
        data.append(each)
    return render_template("archive_list.html", session_list=data, keyword=keyword)
def get_statement_version(request):
    response = {}
    context = {}
    claim_version_id = request.REQUEST.get('claim_version_id')
    statementVersions = StatementVersion.objects.filter(
        claim_version_id=claim_version_id).order_by('-updated_at')
    context['versions'] = []
    for statementVersion in statementVersions:
        item = {}
        item['text'] = statementVersion.text
        item['updated_at'] = utils.pretty_date(statementVersion.updated_at)
        item['author'] = statementVersion.author.first_name + " " + statementVersion.author.last_name
        context['versions'].append(item)
    response['html'] = render_to_string("phase1/statement-versions.html", context)
    return HttpResponse(json.dumps(response), mimetype='application/json')
def getAttr(self):
    attr = {
        'id': self.id,
        'sender': self.sender.get_full_name(),
        'receiver': self.receiver.get_full_name(),
        'content': self.content,
        'created_at_full': self.created_at,  # for sorting
        'created_at': utils.pretty_date(self.created_at),
        'content_type': self.content_type,
    }
    if 'facilitation' in self.content_type or 'action' in self.content_type:
        attr['important'] = 'important'
    if not self.unread:
        attr['is_read'] = 'read'
    if self.is_done:
        attr['is_done'] = 'done'
    if self.target_entry:
        attr['source_id'] = self.target_entry.id
    return attr
def getAttr(self, forum):
    attr = {}
    attr['id'] = self.id
    attr['user_id'] = self.user.id
    attr['user_name'] = self.user.get_full_name()
    attr['author_intro'] = UserInfo.objects.get(user=self.user).description
    try:
        attr['author_role'] = Role.objects.get(user=self.user, forum=forum).role
    except:
        attr['author_role'] = VISITOR_ROLE
    try:
        attr['author_initial'] = str.upper(
            str(self.user.first_name[0]) + str(self.user.last_name[0]))
    except:
        attr['author_initial'] = ''
    attr['created_at_full'] = self.created_at  # for sorting
    attr['created_at'] = utils.pretty_date(self.created_at)
    attr['collective'] = self.collective
    return attr
def index():
    keyword = request.args.get("keyword", "")
    page = request.args.get("page", 1, type=int)
    offset = (page - 1) * 100
    # page, per_page, offset = get_page_args(page_parameter='page',
    #                                        per_page_parameter='per_page')
    db = DB()
    total = 0
    if keyword:
        rr = db._execute(
            "select count(1) from scraper_craigslist where keyword = ? and is_delete = 0 and is_archive = 0",
            values=[keyword])
        for row in rr:
            total = row[0]
        session_list = db._select2dic("scraper_craigslist",
                                      where="keyword = ? and is_delete = 0 and is_archive = 0",
                                      where_values=[keyword],
                                      order="created desc",
                                      limit=100,
                                      offset=offset)
    else:
        rr = db._execute(
            "select count(1) from scraper_craigslist where keyword is not null and is_delete = 0 and is_archive = 0")
        for row in rr:
            total = row[0]
        session_list = db._select2dic("scraper_craigslist",
                                      where="keyword is not null and is_delete = 0 and is_archive = 0",
                                      order="created desc",
                                      limit=100,
                                      offset=offset)
    pagination = get_pagination(page=page,
                                per_page=100,
                                total=total,
                                record_name='users',
                                format_total=True,
                                format_number=True)
    data = []
    for each in session_list:
        each['created'] = pretty_date(int(each['created']))
        data.append(each)
    return render_template("list2.html",
                           session_list=data,
                           keyword=keyword,
                           pagination=pagination,
                           page=page)
def getAttr(self, forum):
    attr = {}
    try:
        attr['author_role'] = Role.objects.get(user=self.author, forum=forum).role
    except:
        attr['author_role'] = VISITOR_ROLE
    attr['id'] = self.id
    attr['author_id'] = self.author.id
    attr['author_name'] = self.author.get_full_name()
    try:
        attr['author_initial'] = str.upper(
            str(self.author.first_name[0]) + str(self.author.last_name[0]))
    except:
        attr['author_initial'] = ''
    attr['content'] = self.content
    attr['created_at_full'] = self.created_at  # for sorting
    attr['updated_at'] = utils.pretty_date(self.updated_at)
    attr['updated_at_full'] = self.updated_at
    attr['is_deleted'] = self.is_deleted
    attr['collective'] = self.collective
    return attr
def getdata():
    keyword = request.args.get("keyword")
    db = DB()
    if keyword:
        session_list = db._select2dic("scraper_craigslist",
                                      where="keyword = ? and is_delete = 0 and is_archive = 0",
                                      where_values=[keyword],
                                      order="created desc",
                                      limit=100,
                                      offset=0)
    else:
        session_list = db._select2dic("scraper_craigslist",
                                      where="keyword is not null and is_delete = 0 and is_archive = 0",
                                      order="created desc",
                                      limit=100,
                                      offset=0)
    data = []
    for each in session_list:
        each['created'] = pretty_date(int(each['created']))
        data.append(each)
    html = []
    for each in data:
        item_html = '''
            <tr>
                <td>%s</td>
                <td>%s</td>
                <td><a id="outurl" href="%s" target="_blank">%s</a></td>
                <td>%s</td>
                <td><input id="comments" name="comments" size="30" type="text" data-id="%s" value="%s"></td>
                <td>%s</td>
                <td>
                    <a href="#" id="delete" data-id="%s">delete</a>
                    <a href="#" id="archive" data-id="%s">archive</a>
                    <a href="#" id="save" data-id="%s">save</a>
                </td>
            </tr>
        ''' % (each['keyword'],
               each['source'],
               each['url'],
               each['title'],
               each['location'],
               each['id'],
               each['comments'] if each['comments'] else "",
               each['created'],
               each['id'],
               each['id'],
               each['id'])
        html.append(item_html)
    return jsonify({"html": ''.join(html)})
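Note that the scraper views above (archive_list, index, getdata) pass an integer Unix timestamp to pretty_date, while the Django and App Engine snippets pass datetime objects. If one helper were meant to serve both kinds of call site it would have to normalize its input first; the following is only a hedged sketch of such a wrapper (the name to_datetime is hypothetical and not part of any of the original projects).

# Hypothetical input-normalizing wrapper; an assumption, not code from the
# projects above.
from datetime import datetime


def to_datetime(value):
    """Normalize an epoch timestamp (int/float) or a datetime to a datetime."""
    if isinstance(value, (int, float)):
        return datetime.fromtimestamp(value)
    return value

# Example: to_datetime(1700000000) and to_datetime(datetime.now()) both yield
# datetime objects that can be handed to a pretty_date-style helper.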
def get_claim_activity(request):
    response = {}
    action = request.REQUEST.get('action')
    if action == 'load-thread':
        print "slot_id", request.REQUEST.get('slot_id')
        claim = Claim.objects.get(id=request.REQUEST.get('slot_id'))
        forum = Forum.objects.get(id=request.session['forum_id'])
        context = {}
        context['entries'] = []
        posts = claim.comments_of_entry.all()
        for post in posts:
            for comment in post.getTree(exclude_root=False):
                context['entries'].append(comment.getAttr(forum))
        # performed rewording
        for version in claim.versions.all():
            version_info = version.getAttr(forum)
            version_info["author_intro"] = version.getAuthor()["author_intro"]
            context['entries'].append(version_info)
            posts = version.comments_of_entry.all()
            for post in posts:
                for comment in post.getTree(exclude_root=False):
                    context['entries'].append(comment.getAttr(forum))
        for claimNuggetAssignment in ClaimNuggetAssignment.objects.filter(claim=claim):
            nugget_assignment_info = claimNuggetAssignment.getAttr(forum)
            nugget_id = nugget_assignment_info["nugget_id"]
            nugget_assignment_info["nugget_content"] = Highlight.objects.get(id=nugget_id).text
            context['entries'].append(nugget_assignment_info)
        for root_comment in ClaimComment.objects.filter(claim=claim, parent__isnull=True):
            entry = {}
            entry["root_comment"] = root_comment
            entry["id"] = root_comment.id
            entry["is_answered"] = root_comment.is_answered
            entry["created_at_full"] = root_comment.created_at
            entry['comments'] = root_comment.get_descendants(include_self=True)
            entry["entry_type"] = "claim_" + str(root_comment.comment_type)
            entry["author_name"] = root_comment.author.first_name + " " + root_comment.author.last_name
            entry["author_role"] = Role.objects.get(user=root_comment.author, forum=forum).role
            entry["author_intro"] = UserInfo.objects.get(user=claim.author).description
            entry["author_id"] = root_comment.author.id
            entry["created_at_pretty"] = utils.pretty_date(root_comment.created_at)
            context['entries'].append(entry)
        # slot assignment events
        # slotassignments = SlotAssignment.objects.filter(slot=slot)
        # for slotassignment in slotassignments:
        #     context['entries'].append(slotassignment.getAttr(forum))
        context['entries'] = sorted(context['entries'],
                                    key=lambda en: en['created_at_full'],
                                    reverse=True)
        context['nuggets'] = []
        highlightClaims = HighlightClaim.objects.filter(claim_id=claim.id)
        for highlightClaim in highlightClaims:
            context['nuggets'].append(highlightClaim.highlight.getAttr())
        context['claim'] = claim
        response['html'] = render_to_string('phase2/claim_detail.html', context)
        return HttpResponse(json.dumps(response), mimetype='application/json')
    return HttpResponse(json.dumps(response), mimetype='application/json')
def prettify(s):
    return pretty_date(s)
def created_time_ago(self):
    """Return the time ago this was created."""
    return pretty_date(self.created_at)
def get_datetime(self):
    return utils.pretty_date(self.created_at)
def prettify(value):
    return pretty_date(value)
def get_doc_coverage(request):
    # filters
    selected = request.REQUEST.get("selected")
    author_ids = request.REQUEST.get('author_ids').split(" ")
    theme_ids = request.REQUEST.get('theme_ids').split(" ")
    doc_ids = request.REQUEST.get('doc_ids').split(" ")
    time_upper_bound = request.REQUEST.get('time_upper_bound')
    time_upper_bound = datetime.strptime(time_upper_bound, "%Y %m %d %H %M")
    time_lower_bound = request.REQUEST.get('time_lower_bound')
    time_lower_bound = datetime.strptime(time_lower_bound, "%Y %m %d %H %M")

    # initialize
    response = {}
    context = {}
    forum = Forum.objects.get(id=request.session['forum_id'])
    docs = Doc.objects.filter(forum=forum)

    # add author arrow
    response["author_activity_map"] = {}
    for author_id in author_ids:
        viewlogs = ViewLog.objects.filter(created_at__lte=time_upper_bound,
                                          created_at__gte=time_lower_bound,
                                          doc_id__in=doc_ids,
                                          author_id=author_id)
        if viewlogs.count() >= 2:
            arr1 = viewlogs.order_by("-created_at")[0].heatmap.split(",")
            last_doc_id = viewlogs.order_by("-created_at")[0].doc.id
            arr2 = viewlogs.filter(doc_id=last_doc_id).order_by("-created_at")[1].heatmap.split(",")
            if len(arr1) == len(arr2):
                l1 = np.array([int(x) for x in arr1])
                l2 = np.array([int(x) for x in arr2])
                l = (l1 - l2).tolist()
                item = {}
                item["doc_id"] = viewlogs.order_by("-created_at")[0].doc_id
                first = l.index(1)
                last = len(l) - l[::-1].index(1) - 1
                item["work_on"] = (first + last) / 2
                item["author_name"] = User.objects.get(id=author_id).first_name + " " + \
                    User.objects.get(id=author_id).last_name
                item["time"] = utils.pretty_date(viewlogs.order_by("-created_at")[0].created_at)
                response["author_activity_map"][author_id] = item

    # nuggetmap
    response["nuggetmaps"] = {}
    nuggetmaps = NuggetMap.objects.filter(created_at__lte=time_upper_bound,
                                          created_at__gte=time_lower_bound)
    if nuggetmaps.count() != 0:
        for doc_id in doc_ids:
            length = get_doc_length(forum, doc_id)  # length is the number of chars
            length = length / num_word_for_each_unit  # num of segments within a doc
            distribution = np.array([0] * length)
            nuggetmaps2 = nuggetmaps.filter(doc_id=doc_id)
            if nuggetmaps2.count() != 0 and author_ids[0] != "" and theme_ids[0] != "":
                for author_id in author_ids:
                    for theme_id in theme_ids:
                        nuggetmaps3 = nuggetmaps2.filter(author_id=author_id, theme_id=theme_id)
                        if nuggetmaps3.count() != 0:
                            arr = nuggetmaps3.order_by("-created_at")[0].distribution.split(",")
                            l = np.array([int(x) for x in arr])
                            distribution = distribution + l
            response["nuggetmaps"][str(doc_id)] = {}
            response["nuggetmaps"][str(doc_id)]["distribution"] = distribution.tolist()
            response["nuggetmaps"][str(doc_id)]["doc_name"] = Doc.objects.get(id=doc_id).title

    # viewlog
    viewlogs = ViewLog.objects.all()
    viewlogs = viewlogs.filter(created_at__lte=time_upper_bound).filter(
        created_at__gte=time_lower_bound)
    response["viewlogs"] = {}
    for doc_id in doc_ids:
        viewlogs2 = viewlogs.filter(doc_id=doc_id)
        length = get_doc_length(forum, doc_id)
        length = length / num_word_for_each_unit  # length was the number of chars
        heatmap = np.array([0] * length)
        if author_ids[0] != "":
            for author_id in author_ids:
                viewlogs3 = viewlogs2.filter(author_id=author_id)
                if viewlogs3.count() != 0:
                    arr = viewlogs3.order_by("-created_at")[0].heatmap.split(",")
                    l = np.array([int(x) for x in arr])
                    heatmap = heatmap + l
        response["viewlogs"][str(doc_id)] = heatmap.tolist()

    response["docs"] = []
    for doc_id in doc_ids:
        item = {}
        item["id"] = "doc-" + str(doc_id)
        item["name"] = str(Doc.objects.get(id=doc_id).title)
        response["docs"].append(item)

    return HttpResponse(json.dumps(response), content_type='application/json')
def human_time(s):
    """ 2014-10-11T08:53:18.392370 """
    return pretty_date(dateutil.parser.parse(s))
params, signals = parse_logs(args.logs)

bbos = pd.read_csv('logs/bbos.csv.gz', parse_dates=['ts'])
trds = pd.read_csv('logs/trds.csv.gz', parse_dates=['ts'])
trds = trds.set_index('ts')

data = dict()
data['figures'] = [build_graph(s, params, bbos, trds) for s in signals]

normalized = [normalize_signal(s, params, trds) for s in signals]
data['longs'], data['shorts'] = normalized_graphs(normalized)

outcomes = [compute_outcome(s, trds) for s in signals]
outcomes = pd.DataFrame.from_dict([x for xs in outcomes for x in xs])
data['longs_distn'] = outcomes_graphs('long', outcomes)
data['shorts_distn'] = outcomes_graphs('short', outcomes)

with open('reports/template.html') as fh:
    template = jinja2.Template(fh.read())

min_date = min([s['ts'] for s in signals])
max_date = max([s['ts'] for s in signals])
params['start'] = pretty_ts(min_date)
params['end'] = pretty_ts(max_date)
data['params'] = params

filename = 'reports/report.{}.{}.html'
filename = filename.format(pretty_date(min_date), pretty_date(max_date))
with open(filename, 'w') as fh:
    fh.write(template.render(data=data))

rebuild_index()
def pretty_date(datetime):
    return utils.pretty_date(datetime)
def get_deltastring(self):
    return pretty_date(self.date)
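Every snippet above delegates to a pretty_date (or utils.pretty_date) helper that renders a date as a human-readable relative string such as "5 minutes ago". None of the projects' own implementations appear in this section, so the following is only a minimal sketch of the common pattern under that assumption; the bucket thresholds and wording are illustrative, not taken from any of the codebases above.

# Minimal pretty_date sketch (assumption: not the implementation used by the
# snippets above).
from datetime import datetime


def pretty_date(then, now=None):
    """Render a datetime as a rough relative string like '5 minutes ago'."""
    now = now or datetime.now()
    seconds = int((now - then).total_seconds())
    if seconds < 0:
        return "in the future"
    buckets = [
        (60, "just now", None),           # under a minute
        (3600, "%d minutes ago", 60),     # under an hour
        (86400, "%d hours ago", 3600),    # under a day
        (604800, "%d days ago", 86400),   # under a week
    ]
    for limit, label, divisor in buckets:
        if seconds < limit:
            return label if divisor is None else label % (seconds // divisor)
    return then.strftime("%Y-%m-%d")      # fall back to an absolute date


# Example:
# pretty_date(datetime(2024, 1, 1, 12, 0), now=datetime(2024, 1, 1, 12, 5))
# -> '5 minutes ago'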