def watches_list(r):
    """Return the current user's watches as a JSON table of columns/records."""
    columns = [
        'Name',
        'Last Ping',
        'Cycle',
        'Grace',
        'Will Alarm',
        'Word',
        'Status',
    ]
    records = []
    for watch in Watch.objects.filter(user=r.user).order_by('created'):
        # A hack to get human-friendly names for timedelta objects: subtract
        # the timedelta from now so naturaltime can build words from a
        # datetime, then chop off the trailing " ago" suffix.
        cycle_text = naturaltime(timezone.now() - watch.cycle)[:-4]
        grace_text = naturaltime(timezone.now() - watch.grace)[:-4]
        if watch.last_ping:
            ping_text = watch.last_ping.isoformat()
            alarm_text = naturaltime(watch.alarm_threshold())
        else:
            ping_text = 'never'
            alarm_text = 'never'
        records.append([
            watch.name,
            ping_text,
            cycle_text,
            grace_text,
            alarm_text,
            watch.word,
            watch.status(),
        ])
    payload = {'columns': columns, 'records': records}
    response = HttpResponse(json.dumps(payload))
    response['Content-Type'] = 'application/json'
    return response
def search_article(self, request, q, page_index, page_size, *args, **kwargs):
    """Search enabled articles whose title contains *q*.

    Returns one page (``page_index``/``page_size``) of dicts, each the
    article's ``__dict__`` augmented with nested author/book info and
    humanized timestamps.
    """
    from ..articles.models import Article
    # SECURITY FIX: the search term was interpolated straight into the SQL
    # string ("... LIKE '%%%s%%' " % q), an SQL injection vector. Pass it
    # as a bound query parameter instead; Django's raw() handles quoting.
    sql = """SELECT a.*, u.nickname as author_name, b.name as book_name
             FROM articles_article as a, auth_user as u, articles_book as b
             WHERE a.author_id=u.id AND a.book_id=b.id AND a.enable=1
             AND a.title LIKE %s
             ORDER BY date_updated DESC"""
    start = page_index * page_size
    articles = Article.objects.raw(sql, ['%' + q + '%'])[start:start + page_size]
    article_list = []
    for article in articles:
        d = article.__dict__
        d.update({
            'author': {'id': d['author_id']},
            'book': {'id': d['book_id'], 'name': d['book_name']},
            'date_created': humanize.naturaltime(d['date_created']),
            'date_updated': humanize.naturaltime(d['date_updated']),
        })
        article_list.append(d)
    return article_list
def post(request, post_id):
    """Render a single post with its one-level nested comments.

    If the visitor arrived via a notification (``?note=<id>``) that belongs
    to them, the notification is marked read. Timestamps less than a day old
    are humanized and converted to Arabic wording.
    """
    p = Post.objects.get(pk=post_id)
    comments = sorted(p.comment_set.all().order_by("timestamp"),
                      key=lambda x: x.parent)
    # If user came here because of notification, lets mark it as read
    note = request.GET.get('note')
    if note:
        try:
            notification = Notification.objects.get(pk=int(note))
            if request.user.is_authenticated and \
                    notification.recipient == request.user:
                notification.mark_as_read()
        # FIX: was a bare 'except:'; only swallow a malformed or unknown
        # notification id instead of hiding every possible error.
        except (ValueError, Notification.DoesNotExist):
            pass
    # Pretty shitty situation with top level comments
    # Since they all have parent=0 they will be incorrectly grouped
    # in the begining of the list
    nested_comments = {}
    all_comments = []
    top_comment_number = 1
    for comment in comments:
        nested_comments.setdefault(comment, [])
        if comment.parent == 0:
            setattr(comment, "number", top_comment_number)
            top_comment_number += 1
            all_comments.append(comment)
        for cmt2 in comments:
            if cmt2.parent == comment.id:
                nested_comments[comment].append(cmt2)
    # FIX: dict.iteritems() does not exist on Python 3 (this code already
    # relies on the Django 1.10+ 'is_authenticated' property above).
    for parent, children in nested_comments.items():
        children.reverse()
        for child in children:
            all_comments.insert(all_comments.index(parent) + 1, child)
    # Lets humanize the dates
    today = datetime.today()
    if today - p.timestamp.replace(tzinfo=None) < timedelta(days=1):
        p.timestamp = naturaltime(p.timestamp)
        p.timestamp = convert_time_to_arabic(p.timestamp)
        # If it was in one day, so did its comments
        for comment in all_comments:
            comment.timestamp = naturaltime(comment.timestamp)
            comment.timestamp = convert_time_to_arabic(comment.timestamp)
    else:
        for comment in all_comments:
            if today - comment.timestamp.replace(tzinfo=None) < timedelta(days=1):
                comment.timestamp = naturaltime(comment.timestamp)
                comment.timestamp = convert_time_to_arabic(comment.timestamp)
    commenters = set()
    for comment in comments:
        commenters.add(comment.user)
    c = {}
    c.update(csrf(request))
    c['post'] = p
    c['comments'] = all_comments
    c['commenters'] = commenters
    return render(request, 'post.html', c)
def to_dict(self):
    """Serialize this object's ``__dict__`` for JSON output.

    Adds the owner, a displayable author name (falling back to the owner's
    string form) and humanized timestamps.
    """
    json_dict = self.__dict__
    json_dict.update({
        "owner": self.owner,
        # FIX: 'if not not self.author_name' replaced with plain truthiness.
        "author_name": self.author_name if self.author_name else str(self.owner),
        "date_created": humanize.naturaltime(self.date_created),
        "date_updated": humanize.naturaltime(self.date_updated),
    })
    return json_dict
def set_file_sizes(self, model, file_field, size_field, date_field):
    """
    Scan through all model rows that do not have their sizes set and set them.

    :return: the total storage used by all files referenced by rows in the model
    """
    # Candidates: rows that have a file but no recorded size, excluding
    # blank paths and rows with no date; alias the date for uniform access.
    rows_to_set = model.objects.filter(
        **{file_field+'__isnull': False, size_field+'__isnull': True}).exclude(
        **{file_field: ''}).exclude(
        **{date_field: None}).annotate(extra__date=F(date_field))
    model_name = getattr(model, '_meta').model_name
    # Track the date range covered by rows whose files are gone from disk.
    min_missing_date = max_missing_date = None
    missing_count = 0
    for row in rows_to_set:
        f = getattr(row, file_field)
        try:
            if isinstance(f, FieldFile):
                file_size = f.size
            else:
                # Non-FieldFile values are treated as folder paths.
                file_size = self.scan_folder_size(f)
        except OSError as ex:
            # Only "file not found" is tolerated; anything else is a real error.
            if ex.errno != errno.ENOENT:
                raise
            # Missing file: record zero size, clear the stale reference,
            # and log it (individually at warn level, summarized below).
            file_size = 0
            row_date = row.extra__date
            file_name = os.path.relpath(ex.filename, settings.MEDIA_ROOT)
            setattr(row, file_field, '')
            logger.warn('Missing %s file %r from %s.',
                        model_name, str(file_name), naturaltime(row_date))
            if min_missing_date is None or row_date < min_missing_date:
                min_missing_date = row_date
            if max_missing_date is None or max_missing_date < row_date:
                max_missing_date = row_date
            missing_count += 1
        setattr(row, size_field, file_size)
        row.save()
    if missing_count:
        # One error-level summary for all missing files, with a date range.
        start_text = naturaltime(min_missing_date)
        end_text = naturaltime(max_missing_date)
        date_range = (start_text if start_text == end_text
                      else start_text + ' to ' + end_text)
        logger.error('Missing %d %s file%s from %s.', missing_count,
                     model_name, pluralize(missing_count), date_range)
    # Get the total amount of active storage recorded.
    return model.objects.exclude(
        **{file_field: ''}).exclude(       # Already purged.
        **{file_field: None}).aggregate(   # Not used.
        models.Sum(size_field))[size_field + "__sum"] or 0
def getUserActivity(user):
    """Describe the user's most recent activity as safe HTML text.

    If the last login is in the same minute as registration (i.e. the user
    never logged in again), show the registration time; otherwise the login.
    """
    joined = user.date_joined
    last = user.last_login
    same_minute = (joined.year == last.year
                   and joined.month == last.month
                   and joined.day == last.day
                   and joined.hour == last.hour
                   and joined.minute == last.minute)
    if same_minute:
        ret = "%s%s" % ("Registered ", naturaltime(joined))
    else:
        ret = "%s%s" % ("Logged in ", naturaltime(last))
    return mark_safe(ret)
def to_dict(self):
    """Serialize this object's ``__dict__`` for JSON output.

    Adds book/author info, a displayable author name (falling back to the
    author's string form), humanized timestamps, and drops the raw content.
    """
    json_dict = self.__dict__
    json_dict.update({
        "book": self.book,
        "author": self.author,
        # FIX: 'if not not self.author_name' replaced with plain truthiness.
        "author_name": self.author_name if self.author_name else str(self.author),
        "date_created": humanize.naturaltime(self.date_created),
        "date_updated": humanize.naturaltime(self.date_updated),
    })
    # FIX: dict.has_key() was removed in Python 3; pop with a default is the
    # equivalent "remove if present" without a pre-check.
    json_dict.pop('content', None)
    return json_dict
def as_json(self, wrap="", extra=None):
    """Serialize selected fields of this object to a JSON string.

    :param wrap: when non-empty, nest the payload under this key.
    :param extra: optional mapping of additional key/value pairs to merge in.
    """
    # FIX: the default was a mutable 'extra={}' shared across all calls;
    # use None as the sentinel and build a fresh dict per call.
    if extra is None:
        extra = {}
    q_data = {k: self.__dict__[k] for k in ('version', 'id', 'status')}
    q_data["creator"] = self.creator.username
    q_data["modifier"] = self.modifier.username
    q_data["created"] = humanize.naturaltime(self.created)
    q_data["modified"] = humanize.naturaltime(self.modified)
    q_data["status_label"] = self.status_labels[self.status]
    q_data["status_css"] = self.status_css_classes[self.status]
    q_data.update(extra)
    # Non-empty strings are truthy; no need for len() > 0.
    if wrap:
        return json.dumps({wrap: q_data})
    return json.dumps(q_data)
def pillbox_sync_time():
    """Return the humanized time of the latest PillBoxData update, or 'N/A'
    when there is no data or it cannot be humanized."""
    last = PillBoxData.objects.all().order_by('-updated_at')[:1]
    try:
        return naturaltime(last[0].updated_at)
    # FIX: narrowed from a bare 'except:' which also swallowed
    # KeyboardInterrupt/SystemExit; this remains best-effort by design.
    except Exception:
        return 'N/A'
def format_timestamp(dt):
    """Format *dt* as 'date time (relative time)' using the site's formats.

    Returns an empty string for ``None``.
    """
    if dt is None:
        return ''
    date_part = dt.strftime(settings.SITE.date_format_strftime)
    time_part = dt.strftime(settings.SITE.time_format_strftime)
    return u"{} {} ({})".format(date_part, time_part, naturaltime(dt))
def printed(self, ar):
    """Link to the excerpt this object was printed by, labelled with its
    humanized build time; empty string when unavailable."""
    if ar is None:
        return ''
    excerpt = self.printed_by
    if excerpt is None:
        return ''
    return ar.obj2html(excerpt, naturaltime(excerpt.build_time))
def timeago(dt, threshold_hours=None):
    """Render *dt* relatively ('3 hours ago') when recent and the language
    is English, otherwise as an absolute timestamp.

    :param threshold_hours: recency cutoff; falls back to
        ``settings.AGO_THRESHOLD_HOURS`` when falsy.
    """
    threshold_hours = threshold_hours or settings.AGO_THRESHOLD_HOURS
    delta = now() - dt
    hours_elapsed = delta.total_seconds() / 3600
    is_recent = delta.days == 0 and 0 <= hours_elapsed < threshold_hours
    if get_lang('en') == 'en' and is_recent:
        return naturaltime(dt)
    return timestr(dt)
def get_notifications(
        request, latest_id=None, is_viewed=False, max_results=10):
    """Return the user's notifications (owned or subscribed) as a JSON-ready
    dict with a total count and up to *max_results* entries.

    The total is counted before the *latest_id* cursor filter is applied.
    """
    qs = models.Notification.objects.filter(
        Q(subscription__settings__user=request.user) | Q(user=request.user),
    )
    if is_viewed is not None:
        qs = qs.filter(is_viewed=is_viewed)
    total_count = qs.count()
    if latest_id is not None:
        qs = qs.filter(id__gt=latest_id)
    qs = qs.order_by('-id').prefetch_related(
        'subscription', 'subscription__notification_type')
    from django.contrib.humanize.templatetags.humanize import naturaltime
    objects = []
    for n in qs[:max_results]:
        objects.append({
            'pk': n.pk,
            'message': n.message,
            'url': n.url,
            'occurrences': n.occurrences,
            'occurrences_msg': _('%d times') % n.occurrences,
            'type': n.subscription.notification_type.key if n.subscription else None,
            'since': naturaltime(n.created),
        })
    return {'success': True,
            'total_count': total_count,
            'objects': objects}
def set_options(self, *args, **kwargs):
    """Prepare download paths, the confirmation prompt, and a progress bar
    for the CAL-ACCESS raw data download command."""
    self.url = 'http://campaignfinance.cdn.sos.ca.gov/dbwebexport.zip'
    self.data_dir = get_download_directory()
    # Create the working directories on demand (short-circuit idiom).
    os.path.exists(self.data_dir) or os.mkdir(self.data_dir)
    self.zip_path = os.path.join(self.data_dir, 'calaccess.zip')
    self.tsv_dir = os.path.join(self.data_dir, "tsv/")
    self.csv_dir = os.path.join(self.data_dir, "csv/")
    os.path.exists(self.csv_dir) or os.mkdir(self.csv_dir)
    if kwargs['download']:
        # Compare remote metadata with the last local download and render
        # a confirmation prompt for the operator.
        self.download_metadata = self.get_download_metadata()
        self.local_metadata = self.get_local_metadata()
        prompt_context = dict(
            last_updated=self.download_metadata['last-modified'],
            time_ago=naturaltime(self.download_metadata['last-modified']),
            size=size(self.download_metadata['content-length']),
            last_download=self.local_metadata['last-download'],
            download_dir=self.data_dir,
        )
        self.prompt = render_to_string(
            'calaccess_raw/downloadcalaccessrawdata.txt',
            prompt_context,
        )
        # NOTE(review): the progress bar reads self.download_metadata, which
        # only exists in download mode, so it is kept inside this branch.
        self.pbar = progressbar.ProgressBar(
            widgets=[
                progressbar.Percentage(),
                progressbar.Bar(),
                ' ',
                progressbar.ETA(),
                ' ',
                progressbar.FileTransferSpeed()
            ],
            maxval=self.download_metadata['content-length']
        )
    self.verbosity = int(kwargs['verbosity'])
def set_options(self, *args, **kwargs):
    """Prepare download paths and the user-facing confirmation prompt,
    optionally redirecting everything into the test data directory."""
    self.url = 'http://campaignfinance.cdn.sos.ca.gov/dbwebexport.zip'
    self.verbosity = int(kwargs['verbosity'])
    if kwargs['test_data']:
        # Test runs use a sandboxed directory and override the setting so
        # downstream code sees the same location.
        self.data_dir = get_test_download_directory()
        settings.CALACCESS_DOWNLOAD_DIR = self.data_dir
        if self.verbosity:
            self.log("Using test data")
    else:
        self.data_dir = get_download_directory()
    # Create the working directories on demand (short-circuit idiom).
    os.path.exists(self.data_dir) or os.makedirs(self.data_dir)
    self.zip_path = os.path.join(self.data_dir, 'calaccess.zip')
    self.tsv_dir = os.path.join(self.data_dir, "tsv/")
    self.csv_dir = os.path.join(self.data_dir, "csv/")
    os.path.exists(self.csv_dir) or os.makedirs(self.csv_dir)
    if kwargs['download']:
        # Compare remote metadata with the last local download and render
        # a confirmation prompt for the operator.
        self.download_metadata = self.get_download_metadata()
        self.local_metadata = self.get_local_metadata()
        prompt_context = dict(
            last_updated=self.download_metadata['last-modified'],
            time_ago=naturaltime(self.download_metadata['last-modified']),
            size=size(self.download_metadata['content-length']),
            last_download=self.local_metadata['last-download'],
            download_dir=self.data_dir,
        )
        self.prompt = render_to_string(
            'calaccess_raw/downloadcalaccessrawdata.txt',
            prompt_context,
        )
def _fmt_error_info(sync_log):
    """HTML badge for a sync log: green check when clean, red X plus the
    humanized error time when a state error occurred."""
    if sync_log.had_state_error:
        return u'<span class="label label-important">X</span> State error {}'.format(
            naturaltime(sync_log.error_date),
        )
    return u'<span class="label label-success">✓</span>'
def timpestamp(self):
    """Annotate each tweet with a humanized creation time and return them.

    NOTE(review): the method name looks like a typo for 'timestamp', but
    renaming would break callers — confirm before changing.
    """
    for tweet in self.tweets:
        tweet.stamp = naturaltime(tweet.created)
    return self.tweets
def page_context(self):
    """Build the template context for the edit-user page: forms, privilege
    flags, and (when the domain uses it) commtrack settings."""
    context = {
        'are_groups': bool(len(self.all_groups)),
        'groups_url': reverse('all_groups', args=[self.domain]),
        'group_form': self.group_form,
        'reset_password_form': self.reset_password_form,
        'is_currently_logged_in_user': self.is_currently_logged_in_user,
        'data_fields_form': self.custom_data.form,
        'can_use_inbound_sms': domain_has_privilege(self.domain,
                                                    privileges.INBOUND_SMS),
        # Flag domains that assign locations to users without holding the
        # LOCATIONS privilege — they need to downgrade.
        'needs_to_downgrade_locations': (
            users_have_locations(self.domain) and
            not has_privilege(self.request, privileges.LOCATIONS)
        ),
        'demo_restore_date': naturaltime(
            demo_restore_date_created(self.editable_user)),
        'hide_password_feedback': settings.ENABLE_DRACONIAN_SECURITY_FEATURES
    }
    # Surface location-save problems from the commtrack form as a page-level
    # error message.
    if self.commtrack_form.errors:
        messages.error(self.request, _(
            "There were some errors while saving user's locations. Please check the 'Locations' tab"
        ))
    # Only expose supply-chain settings when the domain actually uses them.
    if self.domain_object.commtrack_enabled or self.domain_object.uses_locations:
        context.update({
            'commtrack_enabled': self.domain_object.commtrack_enabled,
            'uses_locations': self.domain_object.uses_locations,
            'commtrack': {
                'update_form': self.commtrack_form,
            },
        })
    return context
def get_data(self):
    """Summarize the 15 most recent confirmed ballots of this tournament as
    '<winner> beat <loser>' entries with humanized timestamps."""
    ballotsubs = BallotSubmission.objects.filter(
        debate__round__tournament=self.get_tournament(),
        confirmed=True
    ).prefetch_related(
        'teamscore_set__debate_team',
        'teamscore_set__debate_team__team'
    ).order_by('-timestamp')[:15]
    results = []
    for ballotsub in ballotsubs:
        winner, loser = '?', '?'
        for teamscore in ballotsub.teamscore_set.all():
            team_str = "{:s} ({:s})".format(
                teamscore.debate_team.team.short_name,
                teamscore.debate_team.get_position_display())
            if teamscore.win:
                winner = team_str
            else:
                loser = team_str
        results.append({
            'user': winner + ' beat ' + loser,
            'timestamp': naturaltime(ballotsub.timestamp),
        })
    return results
def test_render(self):
    """A delivery made one hour before its deadline renders as exactly one
    table row with the expected title link, period names and humanized
    delivery time."""
    # Build course -> period -> assignment -> group -> deadline, then add a
    # delivery one hour before the deadline.
    deadlinebuilder = NodeBuilder.quickadd_ducku()\
        .add_subject(short_name='atestcourse', long_name='A Test Course')\
        .add_6month_active_period(short_name='testperiod', long_name='Test Period')\
        .add_assignment('testassignment', long_name='Test Assignment One')\
        .add_group(students=[self.testuser])\
        .add_deadline_in_x_weeks(weeks=1)
    deliverybuilder = deadlinebuilder.add_delivery_x_hours_before_deadline(hours=1)
    response = self._get_as('testuser')
    self.assertEquals(response.status_code, 200)
    selector = htmls.S(response.content)
    # Exactly one delivery row is listed.
    self.assertEquals(selector.count('#objecttableview-table tbody tr'), 1)
    # Column 1: title text plus a link to the delivery-details view.
    self.assertEquals(
        selector.one('#objecttableview-table tbody tr td:nth-child(1) a').alltext_normalized,
        'Test Assignment One - Delivery#1')
    self.assertEquals(
        selector.one('#objecttableview-table tbody tr td:nth-child(1) a')['href'],
        crinstance.reverse_cradmin_url(
            instanceid='devilry_student_group',
            appname='deliveries',
            roleid=deliverybuilder.delivery.deadline.assignment_group_id,
            viewname='deliverydetails',
            kwargs={'pk': deliverybuilder.delivery.pk}))
    # Columns 2 and 3: long and short course/period names.
    self.assertEquals(
        selector.one('#objecttableview-table tbody tr td:nth-child(2)').alltext_normalized,
        'A Test Course - Test Period')
    self.assertEquals(
        selector.one('#objecttableview-table tbody tr td:nth-child(3)').alltext_normalized,
        'atestcourse - testperiod')
    # Column 4: the humanized time of delivery.
    self.assertEquals(
        selector.one('#objecttableview-table tbody tr td:nth-child(4)').alltext_normalized,
        htmls.normalize_whitespace(naturaltime(deliverybuilder.delivery.time_of_delivery)))
def flares_list(r):
    """Return the current user's flares as a JSON table of columns/records."""
    columns = [
        'Mechanism',
        'Config',
        'Last Launched',
        '# Watches',
    ]
    records = []
    for flare in Flare.objects.filter(user=r.user).order_by('created'):
        # A flare that has never launched has no Launch rows.
        try:
            latest = Launch.objects.filter(flare=flare).latest('created')
        except Launch.DoesNotExist:
            last_launch = 'never'
        else:
            last_launch = naturaltime(latest.created)
        records.append([
            flare.signal,
            flare.config,
            last_launch,
            flare.watch_set.count(),
        ])
    payload = {'columns': columns, 'records': records}
    response = HttpResponse(json.dumps(payload))
    response['Content-Type'] = 'application/json'
    return response
def car_json(request, car_id):
    """JSON snapshot of a car's latest telemetry (speed, currents, voltages,
    GPS position and a 10-second connectivity flag)."""
    response = {}
    car = Car.objects.get(pk=car_id)
    data_packets = DataPacket.objects.filter(car=car).order_by('-time')
    if not data_packets:
        return HttpResponse(json.dumps({
            'success': 'true',
            'speed': 0,
            'time': 'Not connected.'
        }))
    # FIX: 'exclude(battery_volt=None)[0] or None' raised IndexError when
    # every packet had a null battery_volt (the 'or None' was dead code).
    # Use .first() and fall back to the same not-connected payload.
    first_packet = data_packets.exclude(battery_volt=None).first()
    if first_packet is None:
        return HttpResponse(json.dumps({
            'success': 'true',
            'speed': 0,
            'time': 'Not connected.'
        }))
    geo_packets = data_packets.exclude(lat=None)
    response['speed'] = '%0.1f' % first_packet.speed_as_mph()
    if first_packet.array_current:
        response['array_current'] = '%0.3f' % (first_packet.array_current or 0.0)
        response['motor_current'] = '%0.3f' % (first_packet.motor_current or 0.0)
        response['tritium_current'] = '%0.3f' % (first_packet.tritium_current or 0.0)
        response['tritium_volt'] = '%0.3f' % (first_packet.tritium_volt or 0.0)
        response['battery_volt'] = '%0.3f' % (first_packet.battery_volt or 0.0)
    response['time'] = 'Last updated %s' % naturaltime(first_packet.time)
    if geo_packets:
        response['lat'] = geo_packets[0].lat
        response['lng'] = geo_packets[0].lng
        response['gps_speed'] = '%0.3f' % (geo_packets[0].gps_speed or 0.0)
    else:
        response['lat'] = None
        response['lng'] = None
        response['gps_speed'] = None
    # It is connected if the last packet seen was less than 10 seconds ago
    response['connected'] = (datetime.now() - first_packet.time) < timedelta(0, 10)
    response['success'] = 'true'
    return HttpResponse(json.dumps(response))
def get_notifications(request, latest_id=None, is_viewed=False, max_results=10):
    """Return the user's subscribed notifications as a JSON-ready dict with
    a total count and up to *max_results* entries (newest first)."""
    notifications = models.Notification.objects.filter(
        subscription__settings__user=request.user)
    # FIX: 'not x is None' replaced with the PEP 8 idiom 'x is not None'.
    if is_viewed is not None:
        notifications = notifications.filter(is_viewed=is_viewed)
    if latest_id is not None:
        notifications = notifications.filter(id__gt=latest_id)
    notifications = notifications.order_by("-id")
    notifications = notifications.prefetch_related(
        "subscription", "subscription__notification_type")
    from django.contrib.humanize.templatetags.humanize import naturaltime
    return {
        "success": True,
        "total_count": notifications.count(),
        "objects": [
            {
                "pk": n.pk,
                "message": n.message,
                "url": n.url,
                "occurrences": n.occurrences,
                "occurrences_msg": _(u"%d times") % n.occurrences,
                "type": n.subscription.notification_type.key,
                "since": naturaltime(n.created),
            }
            for n in notifications[:max_results]
        ],
    }
def date_formatter(value, tooltip, small):
    """Humanize *value* (French): relative wording for today's dates,
    absolute formats otherwise; reversed when rendering a tooltip.

    Non-datetime-like or invalid values are returned unchanged.
    """
    # Normalize to a naive-second-precision datetime; bail out on anything
    # that does not look like a datetime.
    try:
        value = datetime(value.year, value.month, value.day,
                         value.hour, value.minute, value.second)
    except (AttributeError, ValueError):
        return value
    if getattr(value, 'tzinfo', None):
        now = datetime.now(LocalTimezone(value))
    else:
        now = datetime.now()
    now = now - timedelta(0, 0, now.microsecond)
    if value > now:
        return "Dans le futur"
    delta = now - value
    # Natural time for today, absolute date after; reversed in tooltips.
    if (delta.days == 0) != tooltip:
        return naturaltime(value)
    if small:
        return date(value, 'd/m/y à H\hi')
    return date(value, 'l d F Y à H\hi')
def set_options(self, *args, **kwargs):
    """Resolve data paths, fetch remote metadata, and build the operator
    prompt and download progress bar."""
    # Check for the user-defined data dir
    # otherwise put the data in the data dir under the project root
    data_dir = getattr(settings, 'CALACCESS_DOWNLOAD_DIR',
                       os.path.join(settings.BASE_DIR, 'data'))
    self.url = 'http://campaignfinance.cdn.sos.ca.gov/dbwebexport.zip'
    self.data_dir = data_dir
    self.zip_path = os.path.join(self.data_dir, 'calaccess.zip')
    self.tsv_dir = os.path.join(self.data_dir, "tsv/")
    self.csv_dir = os.path.join(self.data_dir, "csv/")
    # Create the CSV directory on demand (short-circuit idiom).
    os.path.exists(self.csv_dir) or os.mkdir(self.csv_dir)
    self.metadata = self.get_metadata()
    # The prompt shows when the remote file last changed, its size, and
    # where it will be stored.
    self.prompt = PROMPT % (
        dateformat(self.metadata['last-modified'], 'N j, Y'),
        dateformat(self.metadata['last-modified'], 'P'),
        naturaltime(self.metadata['last-modified']),
        size(self.metadata['content-length']),
        self.data_dir,
    )
    self.pbar = progressbar.ProgressBar(
        widgets=[
            progressbar.Percentage(),
            progressbar.Bar(),
            ' ',
            progressbar.ETA(),
            ' ',
            progressbar.FileTransferSpeed()
        ],
        maxval=self.metadata['content-length']
    )
def summary_row(cls, ar, obj, **kw):
    """Yield HTML fragments rendering: ``<text> (<created-link> by <user-link>)``.

    Yields are kept in order and lazy — each obj2html call only happens when
    the consumer advances the generator.
    """
    yield obj.text
    yield " ("
    # Link to the object itself, labelled with its humanized creation time.
    yield ar.obj2html(obj, naturaltime(obj.created))
    yield _(" by ")
    yield ar.obj2html(obj.user)
    yield ")"
def APIdataactivity(request):
    """Return the 25 most recent photo posts as the v1 activity JSON feed."""
    resUserActivity = PhotoPost.objects.select_related(
        'user__userprofile').order_by('-PhotoPostID')[:25]
    lstActivity = []
    for act in resUserActivity:
        # Humanized "time since" string for display.
        timesince_c = naturaltime(act.PhotoPostDateTime)
        dctActivity = {
            # NOTE(review): 'PhotoPhostID' looks like a typo of
            # 'PhotoPostID', but it is part of the public API payload —
            # confirm with consumers before renaming.
            'PhotoPhostID': act.PhotoPostID,
            'PhotoPostDateTime': act.PhotoPostDateTime.strftime('%Y-%m-%d %H:%M'),
            'PhotoPostTimeSince': timesince_c,
            'PhotoPost_PlaceID': act.PhotoPost_PlaceID.PlaceID,
            'PhotoPost_PlaceName': act.PhotoPost_PlaceID.PlaceName,
            'PhotoPost_PlaceLat': act.PhotoPost_PlaceID.PlaceLat,
            'PhotoPost_PlaceLong': act.PhotoPost_PlaceID.PlaceLong,
            'PhotoPostPhoto': str(act.PhotoPostPhoto),
            'PhotoPost_UserProfileID': act.PhotoPost_User.UserProfileID,
            'PhotoPost_UserID': act.PhotoPost_User.UserProfile_User.pk,
            'PhotoPost_UserName': act.PhotoPost_User.UserProfile_User.username,
            'PhotoPost_UserFirstName': act.PhotoPost_User.UserProfile_User.first_name,
            'PhotoPost_UserLastName': act.PhotoPost_User.UserProfile_User.last_name,
            'PhotoPost_UserAvatar': str(act.PhotoPost_User.UserProfilePhoto),
            'PhotoPost_Lat': act.PhotoPost_Lat,
            'PhotoPostLong': act.PhotoPostLong,
            'PhotoPostDescription': act.PhotoPostDescription,
        }
        lstActivity.append(dctActivity)
    respuesta = {'success': True, 'message': 'Success.',
                 'version': 'v1', 'data': lstActivity}
    return HttpResponse(json.dumps(respuesta), content_type='application/json')
def date_formatter(value, tooltip, small):
    """
    Format a date to an human readable string.

    :param value: Date to format.
    :param bool tooltip: if `True`, format date to a tooltip label.
    :param bool small: if `True`, create a shorter string.
    :return:
    """
    if not isinstance(value, datetime):
        return value
    # Use an aware "now" when the value itself is timezone-aware.
    if getattr(value, 'tzinfo', None):
        now = datetime.now(get_default_timezone())
    else:
        now = datetime.now()
    now = now - timedelta(microseconds=now.microsecond)
    if value > now:
        return __DATE_FMT_FUTUR
    delta = now - value
    # Natural time for today, absolute date after; reversed in tooltips.
    if (delta.days == 0) != tooltip:
        return naturaltime(value)
    fmt = __ABS_DATE_FMT_SMALL if small else __ABS_DATE_FMT_NORMAL
    return date(value, fmt)
def date_formatter(value, tooltip, small):
    """
    Format a date to an human readable string.

    :param value: Date to format.
    :param bool tooltip: if `True`, format date to a tooltip label.
    :param bool small: if `True`, create a shorter string.
    :return:
    """
    # Normalize to second precision; bail out on non-datetime-like input.
    try:
        value = datetime(value.year, value.month, value.day,
                         value.hour, value.minute, value.second)
    except (AttributeError, ValueError):
        # todo : Check why not raise template.TemplateSyntaxError() ?
        return value
    # Use an aware "now" when the value itself is timezone-aware.
    if getattr(value, 'tzinfo', None):
        now = datetime.now(LocalTimezone(value))
    else:
        now = datetime.now()
    now = now - timedelta(microseconds=now.microsecond)
    if value > now:
        return __DATE_FMT_FUTUR
    delta = now - value
    # Natural time for today, absolute date after; reversed in tooltips.
    if (delta.days == 0) != tooltip:
        return naturaltime(value)
    fmt = __ABS_DATE_FMT_SMALL if small else __ABS_DATE_FMT_NORMAL
    return date(value, fmt)
def dehydrate(self, bundle):
    """Enrich the API bundle with display fields: facebook link, member and
    party names, humanized publish time, a truncated content snippet, and a
    flattened attachment description when one exists."""
    bundle.data['facebook_link'] = bundle.obj.get_link
    bundle.data['member'] = bundle.obj.feed.persona.owner.name
    bundle.data['party'] = bundle.obj.feed.persona.owner.current_party.name
    bundle.data['published_str'] = humanize.naturaltime(bundle.obj.published)
    # Linkify, break lines, then truncate to the display limit.
    bundle.data['content_snippet'] = truncatewords_html(
        linebreaks(append_separators(urlize(bundle.obj.content))),
        MAX_LENGTH_FOR_STATUS_CONTENT)
    if bundle.obj.has_attachment:
        bundle.data['has_attachment'] = True
        # Expose the attachment type as a set of is_* booleans for templates.
        bundle.data['attachment'] = {
            'type': bundle.obj.attachment.type,
            'is_photo': bundle.obj.attachment.type == 'photo',
            'is_video': bundle.obj.attachment.type == 'video',
            'is_youtube_video': bundle.obj.attachment.is_youtube_video,
            'is_link': bundle.obj.attachment.type == 'link',
            'is_event': bundle.obj.attachment.type == 'event',
            'is_music': bundle.obj.attachment.type == 'music',
            'is_note': bundle.obj.attachment.type == 'note',
            'is_nonetype': not bundle.obj.attachment.type,
            'link': bundle.obj.attachment.link,
            'picture': bundle.obj.attachment.picture,
            'name': bundle.obj.attachment.name,
            'caption': bundle.obj.attachment.caption,
            'description': bundle.obj.attachment.description,
            'source': bundle.obj.attachment.source,
            'source_clean': bundle.obj.attachment.source_clean
        }
    return bundle
def value_humanize(self):
    """Humanized (relative) rendering of ``self.value``."""
    return naturaltime(self.value)
def postSubjectListAll(request):
    """Paginated JSON list of posts for all subjects visible to the phone's
    user (teacher: their subjects; student: their course's subjects).

    Expects GET params 'phone' and 'offset'; silently returns an empty list
    otherwise or when the user is unknown.
    """
    subjects = ''
    content = []
    if request.method == 'GET' and 'phone' in request.GET and 'offset' in request.GET:
        userPhone = internationalizePhone(request.GET.get('phone'))
        if User.objects.filter(username=userPhone).exists():
            cUser = User.objects.get(username=userPhone)
            # Resolve subject scope from the user's group (Teacher/Student).
            for ugroup in cUser.groups.all():
                if ugroup.name == 'Teacher':
                    subjects_ids = []
                    teacherSubjects = TeacherSubject.objects.filter(
                        user_id=cUser.id)
                    for tsubject in teacherSubjects:
                        subjects_ids.append(tsubject.subject.id)
                    subjects = Subjects.objects.filter(id__in=subjects_ids)
                elif ugroup.name == 'Student':
                    subjects = Subjects.objects.filter(
                        course_id=cUser.profile.course.id)
            subject_ids = []
            for sub in subjects:
                subject_ids.append(sub.id)
            # Page window: 5 posts per page; offset is 1-based except 0.
            offset_limit = 5
            if int(request.GET.get('offset')) == 0:
                offset = 0
            else:
                offset = int(request.GET.get('offset')) - 1
            posts = Descriptions.objects.filter(
                subject_id__in=subject_ids).values().order_by(
                '-updated')[int(offset) * offset_limit:(int(offset) *
                                                        offset_limit) + offset_limit]
            # NOTE(review): 'total' is computed but never used — dead code?
            total = Descriptions.objects.filter(
                subject_id__in=subject_ids).count()
            for post in posts:
                postObj = Descriptions.objects.get(id=post['id'])
                images = Images.objects.filter(
                    description_id=post['id']).count()
                user = User.objects.get(id=post['user_id'])
                # Keeps only the last group — presumably users have one group.
                for gp in user.groups.all():
                    group = gp
                info = {}
                updated = ''
                # Relative time for today's posts, day name otherwise.
                if humanize.naturalday(post['updated']) == 'today':
                    updated = humanize.naturaltime(post['updated'])
                else:
                    updated = humanize.naturalday(post['updated'])
                info = {
                    'id': post['id'],
                    'display': user.profile.display,
                    'role': str(group),
                    'description': post['description'],
                    'updated': updated,
                    'recommendation': postObj.recommend,
                    'comments': postObj.comments,
                    'images': images,
                    'user': str(postObj.user.username),
                }
                content.append(info)
        else:
            pass
    else:
        pass
    return HttpResponse(json.dumps(content))
def latest_updates(organization_id):
    """
    :param organization_id: the id will always be "correct", whereas name
        will have all kinds of terribleness: multiple organizations that
        have the same name in different branches, organizations with
        generic names etc. Finding an organization by name is tricky.
        Therefore ID.

    We're not filtering any further: given this might result in turning a
    blind eye to low or medium vulnerabilities.

    :return: dict with render metadata and up to 120 recent scans
        (60 endpoint + 60 url scans), newest first.
    """
    try:
        # todo: check that the organization is displayed on the map
        organization = Organization.objects.all().filter(
            pk=organization_id).get()
    except ObjectDoesNotExist:
        return {}
    dataset = {
        "scans": [],
        "render_date": datetime.now(pytz.utc).isoformat(),
        "remark": remark,
    }
    # semi-union, given not all columns are the same. (not python/django-esque solution)
    generic_endpoint_scans = list(
        EndpointGenericScan.objects.filter(
            endpoint__url__organization=organization,
            type__in=ENDPOINT_SCAN_TYPES).order_by("-rating_determined_on")
        [0:60])
    url_endpoint_scans = list(
        UrlGenericScan.objects.filter(
            url__organization=organization,
            type__in=URL_SCAN_TYPES).order_by("-rating_determined_on")[0:60])
    scans = generic_endpoint_scans + url_endpoint_scans
    # Merge both kinds newest-first; missing dates sort as "now".
    scans = sorted(scans,
                   key=lambda k: getattr(k, "rating_determined_on",
                                         datetime.now(pytz.utc)),
                   reverse=True)
    for scan in scans:
        scan_type = scan.type
        calculation = get_severity(scan)
        if scan_type in URL_SCAN_TYPES:
            # url scans
            dataset["scans"].append({
                "organization": organization.name,
                "organization_id": organization.pk,
                "url": scan.url.url,
                "service": "%s" % scan.url.url,
                "protocol": scan_type,
                "port": "",
                "ip_version": "",
                "scan_type": scan_type,
                "explanation": calculation.get("explanation", ""),  # sometimes you dont get one.
                "high": calculation.get("high", 0),
                "medium": calculation.get("medium", 0),
                "low": calculation.get("low", 0),
                "rating_determined_on_humanized": naturaltime(scan.rating_determined_on),
                "rating_determined_on": scan.rating_determined_on,
                "last_scan_humanized": naturaltime(scan.last_scan_moment),
                "last_scan_moment": scan.last_scan_moment.isoformat(),
            })
        else:
            # endpoint scans
            dataset["scans"].append({
                "organization": organization.name,
                "organization_id": organization.pk,
                "url": scan.endpoint.url.url,
                "service": "%s/%s (IPv%s)" % (scan.endpoint.protocol,
                                              scan.endpoint.port,
                                              scan.endpoint.ip_version),
                "protocol": scan.endpoint.protocol,
                "port": scan.endpoint.port,
                "ip_version": scan.endpoint.ip_version,
                "scan_type": scan_type,
                "explanation": calculation.get("explanation", ""),  # sometimes you dont get one.
                "high": calculation.get("high", 0),
                "medium": calculation.get("medium", 0),
                "low": calculation.get("low", 0),
                "rating_determined_on_humanized": naturaltime(scan.rating_determined_on),
                "rating_determined_on": scan.rating_determined_on,
                "last_scan_humanized": naturaltime(scan.last_scan_moment),
                "last_scan_moment": scan.last_scan_moment.isoformat(),
            })
    return dataset
def naturaltime(source):
    """Thin wrapper delegating to :func:`humanize.naturaltime`."""
    return humanize.naturaltime(source)
def render_deadline(self, value):
    """Render the deadline as relative time; empty string when unset."""
    if not value:
        return ''
    return naturaltime(value)
def get_date_joined(self, obj):
    """Humanized (relative) join date of *obj* for serialization."""
    return naturaltime(obj.date_joined)
def natural_time(self):
    """Humanized (relative) rendering of ``self.created_at``."""
    return naturaltime(self.created_at)
def github_created_on(self, instance):
    """Humanized GitHub account creation date of the subscription's
    contributor profile."""
    profile = instance.subscription.contributor_profile
    return naturaltime(profile.github_created_on)
def get_date(self):
    """Humanized (relative) publication date."""
    return humanize.naturaltime(self.pub_date)
def render_last_updated(self, value):
    """Render the last-updated time relatively; empty string when unset."""
    if not value:
        return ''
    return naturaltime(value)
def naturaltime(self):
    """Humanized (relative) rendering of ``self.date``."""
    # Imported lazily so the module does not require Django at import time.
    from django.contrib.humanize.templatetags.humanize import naturaltime
    return naturaltime(self.date)
def get_created(self, obj):
    """Humanized (relative) creation time of *obj* for serialization."""
    return naturaltime(obj.created)
def github_created_on(self, instance):
    """Humanized GitHub account creation date of the instance's profile."""
    return naturaltime(instance.profile.github_created_on)
def label_from_instance(self, sensor):
    """Choice label for a sensor, annotated with its last report time when
    one exists."""
    last_log = sensor.LastLog()
    if not last_log:
        return str(sensor)
    return '%s (Last report: %s)' % (sensor, naturaltime(last_log.time))
def get_created(self):
    """Humanized (relative) rendering of ``self.created``."""
    # Imported lazily so the module does not require Django at import time.
    from django.contrib.humanize.templatetags.humanize import naturaltime
    return naturaltime(self.created)
def generate_project_list(user, year, region, section):
    """Stream a CSV export of project-years visible to *user*, optionally
    narrowed by fiscal year, region, or section (passed as strings; the
    literal "None" means "not set")."""
    # Create the HttpResponse object with the appropriate CSV header.
    response = HttpResponse(content_type='text/csv')
    response.write(u'\ufeff'.encode(
        'utf8'))  # BOM (optional...Excel needs it to open UTF-8 file properly)
    writer = csv.writer(response)
    # NOTE(review): status_choices is assigned but never used below.
    status_choices = models.ProjectYear.status_choices
    # 'attr.path|Label' entries are resolved by get_verbose_label /
    # get_field_value; bare labels are computed in the loop below.
    fields = [
        'region',
        'division',
        'project.section|section',
        'project.id|Project Id',
        'fiscal_year',
        'project.title|title',
        'Overview',
        'Overview word count',
        'project.default_funding_source|Primary funding source',
        'project.functional_group|Functional group',
        'Project leads',
        'status',
        'updated_at|Last modified date',
        'modified_by|Last modified by',
        'Last modified description',
        'Activity count',
        'Staff count',
        'Sum of staff FTE (weeks)',
        'Sum of costs',
    ]
    # Admins see everything in the fiscal year (optionally narrowed);
    # everyone else only the sections they manage.
    if in_projects_admin_group(user):
        qs = ProjectYear.objects.filter(fiscal_year_id=year).distinct()
        if section != "None":
            qs = qs.filter(project__section_id=section)
        elif region != "None":
            qs = qs.filter(
                project__section__division__branch__region_id=region)
    else:
        sections = utils.get_manageable_sections(user)
        qs = ProjectYear.objects.filter(
            project__section__in=sections).distinct()
    header_row = [
        get_verbose_label(ProjectYear.objects.first(), header)
        for header in fields
    ]
    writer.writerow(header_row)
    for obj in qs:
        data_row = list()
        for field in fields:
            # Computed columns first; fall through to generic field lookup.
            if "division" in field:
                val = " ---"
                if obj.project.section:
                    val = obj.project.section.division.tname
            elif "region" in field:
                val = " ---"
                if obj.project.section:
                    val = obj.project.section.division.branch.region.tname
            elif "leads" in field:
                val = listrify(obj.get_project_leads_as_users())
            elif "updated_at" in field:
                val = obj.updated_at.strftime("%Y-%m-%d")
            elif "Last modified description" in field:
                # Humanized "time since last modification".
                val = naturaltime(obj.updated_at)
            elif field == "Overview":
                val = html2text(nz(obj.project.overview_html, ""))
            elif field == "Overview word count":
                val = len(
                    html2text(nz(obj.project.overview_html, "")).split(" "))
            elif field == "Activity count":
                val = obj.activities.count()
            elif field == "Staff count":
                val = obj.staff_set.count()
            elif field == "Sum of staff FTE (weeks)":
                val = obj.staff_set.order_by("duration_weeks").aggregate(
                    dsum=Sum("duration_weeks"))["dsum"]
            elif field == "Sum of costs":
                # O&M + capital + staff costs, each null-safe via nz().
                val = nz(obj.omcost_set.filter(amount__isnull=False).aggregate(dsum=Sum("amount"))["dsum"], 0) + \
                      nz(obj.capitalcost_set.filter(amount__isnull=False).aggregate(dsum=Sum("amount"))["dsum"], 0) + \
                      nz(obj.staff_set.filter(amount__isnull=False).aggregate(dsum=Sum("amount"))["dsum"], 0)
            else:
                val = get_field_value(obj, field)
            data_row.append(val)
        writer.writerow(data_row)
    return response
def _naturaltime_with_hover(date):
    """Render *date* as a relative-time span; hover tooltip shows the exact value."""
    label = naturaltime(date) or '---'
    return u'<span title="{}">{}</span>'.format(date, label)
def render_last_human_review(self, value):
    """Show *value* as a humanized relative time, or an empty string when unset."""
    if not value:
        return ''
    return naturaltime(value)
def relative_date(self):
    """Humanized age of this object's posting date (e.g. '3 days ago')."""
    posted = self.date_posted
    return naturaltime(posted)
def created(self, obj):
    """Humanized creation time for display (e.g. in an admin list column)."""
    when = obj.date_created
    return naturaltime(when)
def build_github_notification(bounty, event_name, profile_pairs=None):
    """Build a Github comment for the specified Bounty.

    Args:
        bounty (dashboard.models.Bounty): The Bounty to be marketed.
        event_name (str): The name of the event.
        profile_pairs (list of tuples): The list of username and profile page
            URL tuple pairs.

    Returns:
        str: The Markdown/HTML comment body for the event (empty string for
            unrecognized events).

    """
    from dashboard.utils import get_ordinal_repr  # hack for circular import issue
    from dashboard.models import BountyFulfillment, Interest
    msg = ''
    usdt_value = ""
    try:
        # Best-effort USD equivalent; left blank when no rate is available.
        usdt_value = f"({round(bounty.value_in_usdt_now, 2)} USD @ ${round(convert_token_to_usdt(bounty.token_name), 2)}/{bounty.token_name})" if bounty.value_in_usdt_now else ""
    except Exception:
        pass  # no USD conversion rate available
    natural_value = round(bounty.get_natural_value(), 4)
    absolute_url = bounty.get_absolute_url()
    amount_open_work = "{:,}".format(amount_usdt_open_work())
    bounty_owner = f"@{bounty.bounty_owner_github_username}" if bounty.bounty_owner_github_username else ""
    status_header = get_status_header(bounty)
    if event_name == 'new_bounty':
        msg = f"{status_header}__This issue now has a funding of {natural_value} " \
              f"{bounty.token_name} {usdt_value} attached to it.__\n\n * If you would " \
              f"like to work on this issue you can 'start work' [on the Gitcoin Issue Details page]({absolute_url}).\n " \
              "* Questions? Checkout <a href='https://gitcoin.co/help'>Gitcoin Help</a> or the " \
              f"<a href='https://gitcoin.co/slack'>Gitcoin Slack</a>\n * ${amount_open_work}" \
              " more funded OSS Work available on the [Gitcoin Issue Explorer](https://gitcoin.co/explorer)\n"
    # NOTE(review): plain `if` (not `elif`) — presumably intentional chain
    # break, but 'increased_bounty' can never follow 'new_bounty' anyway.
    if event_name == 'increased_bounty':
        msg = f"{status_header}__The funding of this issue was increased to {natural_value} " \
              f"{bounty.token_name} {usdt_value}.__\n\n * If you would " \
              f"like to work on this issue you can claim it [here]({absolute_url}).\n " \
              "* If you've completed this issue and want to claim the bounty you can do so " \
              f"[here]({absolute_url})\n * Questions? Checkout <a href='https://gitcoin.co/help'>Gitcoin Help</a> or " \
              f"the <a href='https://gitcoin.co/slack'>Gitcoin Slack</a>\n * ${amount_open_work}" \
              " more funded OSS Work available on the [Gitcoin Issue Explorer](https://gitcoin.co/explorer)\n"
    elif event_name == 'killed_bounty':
        msg = f"{status_header}__The funding of {natural_value} {bounty.token_name} " \
              f"{usdt_value} attached to this issue has been **cancelled** by the bounty submitter__\n\n " \
              "* Questions? Checkout <a href='https://gitcoin.co/help'>Gitcoin Help</a> or the <a href='https://gitcoin.co/slack'>Gitcoin Slack</a>\n * " \
              f"${amount_open_work} more funded OSS Work available on the [Gitcoin Issue Explorer](https://gitcoin.co/explorer)\n"
    elif event_name == 'rejected_claim':
        msg = f"{status_header}__The work submission for {natural_value} {bounty.token_name} {usdt_value} " \
              "has been **rejected** and can now be submitted by someone else.__\n\n * If you would " \
              f"like to work on this issue you can claim it [here]({absolute_url}).\n * If you've " \
              f"completed this issue and want to claim the bounty you can do so [here]({absolute_url})\n " \
              "* Questions? Checkout <a href='https://gitcoin.co/help'>Gitcoin Help</a> or <a href='https://gitcoin.co/slack'>Gitcoin Slack</a>\n * " \
              f"${amount_open_work} more funded OSS Work available on the [Gitcoin Issue Explorer](https://gitcoin.co/explorer)\n"
    elif event_name == 'work_started':
        # List every interested worker, with approve/reject links for
        # pending applicants when the bounty requires funder approval.
        interested = bounty.interested.all().order_by('created')
        # interested_plural = "s" if interested.count() != 0 else ""
        from_now = naturaltime(bounty.expires_date)
        started_work = bounty.interested.filter(pending=False).all()
        # pending_approval = bounty.interested.filter(pending=True).all()
        bounty_owner_clear = f"@{bounty.bounty_owner_github_username}" if bounty.bounty_owner_github_username else ""
        approval_required = bounty.permission_type == 'approval'
        if started_work.exists():
            msg = f"{status_header}__Work has been started__.\n\n"
        else:
            msg = f"{status_header}__Workers have applied to start work__.\n\n"
        msg += f"\nThese users each claimed they can complete the work by {from_now}. " \
               "Please review their questions below:\n\n"
        for i, interest in enumerate(interested, start=1):
            profile_link = f"[{interest.profile.handle}]({interest.profile.url})"
            action = "started work"
            if interest.pending:
                action = 'applied to start work'
                action += f" _(Funders only: [approve worker]({bounty.approve_worker_url(interest.profile.handle)})"
                action += f" | [reject worker]({bounty.reject_worker_url(interest.profile.handle)}))_"
            if not interest.pending and approval_required:
                action = 'been approved to start work'
            # "dibs" ordinal only matters for traditional bounties with
            # multiple interested workers.
            show_dibs = interested.count(
            ) > 1 and bounty.project_type == 'traditional'
            dibs = f" ({get_ordinal_repr(i)} dibs)" if show_dibs else ""
            msg += f"\n{i}. {profile_link} has {action}{dibs}. "
            issue_message = interest.issue_message.strip()
            if issue_message:
                msg += f"\t\n * Q: " \
                       f"{issue_message}"
        msg += "\n\n"
    elif event_name == 'work_submitted':
        # Link each fulfillment (PR) submitted against the bounty.
        sub_msg = ""
        if bounty.fulfillments.exists():
            sub_msg = f"\n\n{bounty_owner if bounty_owner else 'If you are the bounty funder,'} " \
                      "please take a look at the submitted work:\n"
            for bf in bounty.fulfillments.all():
                username = "******" + bf.fulfiller_github_username if bf.fulfiller_github_username else bf.fulfiller_address
                link_to_work = f"[PR]({bf.fulfiller_github_url})" if bf.fulfiller_github_url else "(Link Not Provided)"
                sub_msg += f"* {link_to_work} by {username}\n"
        profiles = ""
        if profile_pairs:
            for i, profile in enumerate(profile_pairs, start=1):
                profiles = profiles + f"\n {i}. [@{profile[0]}]({profile[1]})"
            profiles += "\n\n"
        msg = f"{status_header}__Work for {natural_value} {bounty.token_name} {usdt_value} has been submitted by__:\n" \
              f"{profiles}{sub_msg}\n<hr>\n\n* Learn more [on the Gitcoin Issue Details page]({absolute_url})\n" \
              "* Questions? Checkout <a href='https://gitcoin.co/help'>Gitcoin Help</a> or the " \
              f"<a href='https://gitcoin.co/slack'>Gitcoin Slack</a>\n${amount_open_work} more funded " \
              "OSS Work available on the [Gitcoin Issue Explorer](https://gitcoin.co/explorer)\n"
    elif event_name == 'work_done':
        # Credit the accepted fulfiller when one exists; otherwise omit.
        try:
            accepted_fulfillment = bounty.fulfillments.filter(
                accepted=True).latest('fulfillment_id')
            accepted_fulfiller = f' to @{accepted_fulfillment.fulfiller_github_username}'
        except BountyFulfillment.DoesNotExist:
            accepted_fulfiller = ''
        msg = f"{status_header}__The funding of {natural_value} {bounty.token_name} {usdt_value} attached to this " \
              f"issue has been approved & issued{accepted_fulfiller}.__ \n\n * Learn more at [on the Gitcoin " \
              f"Issue Details page]({absolute_url})\n * Questions? Checkout <a href='https://gitcoin.co/help'>Gitcoin Help</a> or the <a href='https://gitcoin.co/slack'>Gitcoin Slack</a>" \
              f"\n * ${amount_open_work} more funded OSS Work available on the [Gitcoin Issue Explorer](https://gitcoin.co/explorer)\n"
    return msg
def set_options(self, *args, **kwargs):
    """Resolve download/extract paths and build the confirmation prompt.

    Reads command kwargs ('verbosity', 'test_data', 'download',
    'resume-download'), sets self.url / data_dir / zip_path / tsv_dir /
    csv_dir, and — when downloading — compares remote metadata against the
    local cache to decide whether a partial download can be resumed.
    """
    self.url = 'http://campaignfinance.cdn.sos.ca.gov/dbwebexport.zip'
    self.verbosity = int(kwargs['verbosity'])
    if kwargs['test_data']:
        # Test mode redirects all file work into the test fixture directory.
        self.data_dir = get_test_download_directory()
        settings.CALACCESS_DOWNLOAD_DIR = self.data_dir
    else:
        self.data_dir = get_download_directory()
    # Short-circuit mkdir: create the directory only if it doesn't exist.
    os.path.exists(self.data_dir) or os.makedirs(self.data_dir)
    self.zip_path = os.path.join(self.data_dir, 'calaccess.zip')
    self.tsv_dir = os.path.join(self.data_dir, "tsv/")
    # Immediately check that the tsv directory exists when using test data,
    # so we can stop immediately.
    if kwargs['test_data']:
        if not os.path.exists(self.tsv_dir):
            raise CommandError("Data tsv directory does not exist "
                               "at %s" % self.tsv_dir)
        elif self.verbosity:
            self.log("Using test data")
    self.csv_dir = os.path.join(self.data_dir, "csv/")
    os.path.exists(self.csv_dir) or os.makedirs(self.csv_dir)
    if kwargs['download']:
        # Remote HTTP headers vs. locally recorded state of the last fetch.
        self.download_metadata = self.get_download_metadata()
        self.local_metadata = self.get_local_metadata()
        total_size = self.download_metadata['content-length']
        last_modified = self.download_metadata['last-modified']
        last_download = self.local_metadata['last-download']
        cur_size = 0
        self.resume_download = (kwargs['resume-download'] and
                                os.path.exists(self.zip_path))
        if self.resume_download:
            # Make sure the downloaded chunk is newer than the
            # last update to the remote data.
            timestamp = os.path.getmtime(self.zip_path)
            chunk_datetime = datetime.fromtimestamp(timestamp, utc)
            self.resume_download = chunk_datetime > last_modified
            if self.resume_download:
                # Resuming: treat the partial file's mtime/size as the
                # baseline for the prompt shown to the user.
                last_download = chunk_datetime
                cur_size = os.path.getsize(self.zip_path)
        prompt_context = dict(
            resuming=self.resume_download,
            already_downloaded=last_modified==last_download,
            last_modified=last_modified,
            last_download=last_download,
            time_ago=naturaltime(last_download),
            total_size=size(total_size),
            cur_size=size(cur_size),
            download_dir=self.data_dir,
        )
        # Rendered template is displayed before the download begins.
        self.prompt = render_to_string(
            'calaccess_raw/downloadcalaccessrawdata.txt',
            prompt_context,
        )
def humanize_time(self):
    """Humanized form of this object's last-modified timestamp."""
    modified_at = self.modified
    return naturaltime(modified_at)
def desc(self):
    """One-line summary: creation age, project length, bounty type, experience level."""
    parts = (
        naturaltime(self.web3_created),
        self.idx_project_length,
        self.bounty_type,
        self.experience_level,
    )
    return "{} {} {} {}".format(*parts)
def inner(obj):
    # Humanize the named attribute when it is set; pass falsy values
    # (None, empty, etc.) straight through unchanged.
    attr = getattr(obj, field_name)
    return naturaltime(attr) if attr else attr
def naturaltime(value):
    """Humanize *value*, collapsing anything within the last 5 minutes to "just now".

    Falls through to humanize.naturaltime for older timestamps. *value* must
    be a timezone-aware datetime (it is subtracted from an aware UTC now).
    """
    age = datetime.now(timezone.utc) - value
    if age > timedelta(minutes=5):
        return humanize.naturaltime(value)
    return "just now"
def finished(self, obj):
    """Humanized completion time for display (e.g. in an admin list column)."""
    finished_at = obj.date_finished
    return naturaltime(finished_at)
def build_github_notification(bounty, event_name, profile_pairs=None):
    """Build a Github comment for the specified Bounty.

    Args:
        bounty (dashboard.models.Bounty): The Bounty to be marketed.
        event_name (str): The name of the event.
        profile_pairs (list of tuples): The list of username and profile page
            URL tuple pairs.

    Returns:
        str: The Markdown/HTML comment body for the event (empty string for
            unrecognized events).

    """
    from dashboard.models import BountyFulfillment, Interest
    msg = ''
    usdt_value = ""
    try:
        # Best-effort USD equivalent; left blank when no rate is available.
        usdt_value = f"({round(bounty.value_in_usdt_now, 2)} USD @ ${round(convert_token_to_usdt(bounty.token_name), 2)}/{bounty.token_name})" if bounty.value_in_usdt_now else ""
    except Exception:
        pass  # no USD conversion rate available
    natural_value = round(bounty.get_natural_value(), 4)
    absolute_url = bounty.get_absolute_url()
    amount_open_work = "{:,}".format(amount_usdt_open_work())
    profiles = ""
    bounty_owner = f"@{bounty.bounty_owner_github_username}" if bounty.bounty_owner_github_username else ""
    status_header = get_status_header(bounty)
    if profile_pairs:
        from dashboard.utils import get_ordinal_repr  # hack for circular import issue
        # Numbered list of worker profiles; precedence ordinal shown only
        # when multiple workers start on the same bounty.
        for i, profile in enumerate(profile_pairs, start=1):
            show_dibs = event_name == 'work_started' and len(profile_pairs) > 1
            dibs = f" ({get_ordinal_repr(i)} precedence)" if show_dibs else ""
            profiles = profiles + f"\n {i}. [@{profile[0]}]({profile[1]}) {dibs}"
        profiles += "\n\n"
    if event_name == 'new_bounty':
        msg = f"{status_header}__This issue now has a funding of {natural_value} " \
              f"{bounty.token_name} {usdt_value} attached to it.__\n\n * If you would " \
              f"like to work on this issue you can 'start work' [on the Gitcoin Issue Details page]({absolute_url}).\n " \
              "* Questions? Checkout <a href='https://gitcoin.co/help'>Gitcoin Help</a> or the " \
              f"<a href='https://gitcoin.co/slack'>Gitcoin Slack</a>\n * ${amount_open_work}" \
              " more funded OSS Work available on the [Gitcoin Issue Explorer](https://gitcoin.co/explorer)\n"
    # NOTE(review): plain `if` (not `elif`) — harmless since event_name can
    # match at most one branch, but inconsistent with the rest of the chain.
    if event_name == 'increased_bounty':
        msg = f"{status_header}__The funding of this issue was increased to {natural_value} " \
              f"{bounty.token_name} {usdt_value}.__\n\n * If you would " \
              f"like to work on this issue you can claim it [here]({absolute_url}).\n " \
              "* If you've completed this issue and want to claim the bounty you can do so " \
              f"[here]({absolute_url})\n * Questions? Checkout <a href='https://gitcoin.co/help'>Gitcoin Help</a> or " \
              f"the <a href='https://gitcoin.co/slack'>Gitcoin Slack</a>\n * ${amount_open_work}" \
              " more funded OSS Work available on the [Gitcoin Issue Explorer](https://gitcoin.co/explorer)\n"
    elif event_name == 'killed_bounty':
        msg = f"{status_header}__The funding of {natural_value} {bounty.token_name} " \
              f"{usdt_value} attached to this issue has been **cancelled** by the bounty submitter__\n\n " \
              "* Questions? Checkout <a href='https://gitcoin.co/help'>Gitcoin Help</a> or the <a href='https://gitcoin.co/slack'>Gitcoin Slack</a>\n * " \
              f"${amount_open_work} more funded OSS Work available on the [Gitcoin Issue Explorer](https://gitcoin.co/explorer)\n"
    elif event_name == 'rejected_claim':
        msg = f"{status_header}__The work submission for {natural_value} {bounty.token_name} {usdt_value} " \
              "has been **rejected** and can now be submitted by someone else.__\n\n * If you would " \
              f"like to work on this issue you can claim it [here]({absolute_url}).\n * If you've " \
              f"completed this issue and want to claim the bounty you can do so [here]({absolute_url})\n " \
              "* Questions? Checkout <a href='https://gitcoin.co/help'>Gitcoin Help</a> or <a href='https://gitcoin.co/slack'>Gitcoin Slack</a>\n * " \
              f"${amount_open_work} more funded OSS Work available on the [Gitcoin Issue Explorer](https://gitcoin.co/explorer)\n"
    elif event_name == 'work_started':
        from_now = naturaltime(bounty.expires_date)
        msg = f"{status_header}__Work has been started__.\n{profiles} has committed to working on this project to be " \
              f"completed {from_now}.\n\n"
        bounty_owner_clear = f"@{bounty.bounty_owner_github_username}" if bounty.bounty_owner_github_username else ""
        # Append each worker's application questions, addressed to the funder.
        try:
            if profile_pairs:
                for profile in profile_pairs:
                    interests = Interest.objects.filter(
                        profile__handle=profile[0], bounty=bounty)
                    for interest in interests:
                        if interest.issue_message.strip():
                            msg += f"\n__Please answer following questions/comments__ {bounty_owner_clear}:\n\n" + \
                                   interest.issue_message
        except Exception as e:
            # Best-effort: a lookup failure shouldn't block the notification.
            print(e)
    elif event_name == 'work_submitted':
        # Link each fulfillment (PR) submitted against the bounty.
        sub_msg = ""
        if bounty.fulfillments.exists():
            sub_msg = f"\n\n{bounty_owner if bounty_owner else 'If you are the bounty funder,'} " \
                      "please take a look at the submitted work:\n"
            for bf in bounty.fulfillments.all():
                username = "******" + bf.fulfiller_github_username if bf.fulfiller_github_username else bf.fulfiller_address
                link_to_work = f"[PR]({bf.fulfiller_github_url})" if bf.fulfiller_github_url else "(Link Not Provided)"
                sub_msg += f"* {link_to_work} by {username}\n"
        msg = f"{status_header}__Work for {natural_value} {bounty.token_name} {usdt_value} has been submitted by__:\n" \
              f"{profiles}{sub_msg}\n<hr>\n\n* Learn more [on the Gitcoin Issue Details page]({absolute_url})\n" \
              "* Questions? Checkout <a href='https://gitcoin.co/help'>Gitcoin Help</a> or the " \
              f"<a href='https://gitcoin.co/slack'>Gitcoin Slack</a>\n${amount_open_work} more funded " \
              "OSS Work available on the [Gitcoin Issue Explorer](https://gitcoin.co/explorer)\n"
    elif event_name == 'work_done':
        # Credit the accepted fulfiller when one exists; otherwise omit.
        try:
            accepted_fulfillment = bounty.fulfillments.filter(
                accepted=True).latest('fulfillment_id')
            accepted_fulfiller = f' to @{accepted_fulfillment.fulfiller_github_username}'
        except BountyFulfillment.DoesNotExist:
            accepted_fulfiller = ''
        msg = f"{status_header}__The funding of {natural_value} {bounty.token_name} {usdt_value} attached to this " \
              f"issue has been approved & issued{accepted_fulfiller}.__ \n\n * Learn more at [on the Gitcoin " \
              f"Issue Details page]({absolute_url})\n * Questions? Checkout <a href='https://gitcoin.co/help'>Gitcoin Help</a> or the <a href='https://gitcoin.co/slack'>Gitcoin Slack</a>" \
              f"\n * ${amount_open_work} more funded OSS Work available on the [Gitcoin Issue Explorer](https://gitcoin.co/explorer)\n"
    return msg
def created_on_nt(self, obj):
    """Humanized creation timestamp ('nt' = naturaltime) for display."""
    created_at = obj.created_on
    return naturaltime(created_at)