Example #1
def render_field(field, empty_value=_(u'-- undefined --'), value=None):
    value = value or get_field_display(field)
    if value is None or value == u'':
        return empty_value
    # Try to validate value to get internal Python types.
    # We need this to check if we can apply special formatting.
    if not hasattr(field.field, 'choices'):
        try:
            value = field.field.clean(value)
        except ValidationError:
            pass
    # We don't want to print True or False in case of a CheckBox widget.
    if isinstance(field.field, BooleanField):
        return CheckboxInput(field.field.widget.attrs).render(field.name,
                                              value, attrs={'readonly': True})
    # Format dates and times nicely.
    elif isinstance(field.field, DateField) and isinstance(value,
                                                               datetime.date):
        return date(value)
    elif isinstance(field.field, DateTimeField) and isinstance(value,
                                                           datetime.datetime):
        return mark_safe(u'%s %s' % (date(value), time(value)))
    elif isinstance(field.field, TimeField) and isinstance(value,
                                                               datetime.time):
        return time(value)
    return escape(value)
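
The snippet above omits its imports. A plausible set for it is sketched below; this is an assumption based on the names used (get_field_display is the project's own helper and is not shown here):

import datetime

from django.core.exceptions import ValidationError
from django.forms import BooleanField, DateField, DateTimeField, TimeField
from django.forms.widgets import CheckboxInput
from django.template.defaultfilters import date, time
from django.utils.html import escape
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _  # gettext_lazy on newer Django
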
Example #2
def _user_to_row(user):
    simple_attrs = [
        'first_name', 'last_name', 'email',
        'playa_name', 'sponsor', 'city', 'cell_number',
        'emergency_contact_name', 'emergency_contact_phone',
        'has_ticket', 'looking_for_ticket',
        'camping_this_year', 'public_notes']

    row = [getattr(user, attr) for attr in simple_attrs]

    row.extend([date(user.arrival_date), date(user.departure_date)])

    std_restrictions = list(user.meal_restrictions.all())
    restrictions = ",".join(map(str, std_restrictions + [user.other_restrictions]))
    row.append(restrictions)

    try:
        v = user.vehicle
        row.extend([v.get_transit_arrangement_display(), v.transit_provider,
            v.model_of_car, v.make_of_car,
            v.width, v.length])
    except Vehicle.DoesNotExist:
        row.extend([""] * 6)

    try:
        s = user.shelter
        row.extend([s.get_sleeping_arrangement_display(), s.shelter_provider,
            s.number_of_people_tent_sleeps, s.sleeping_under_ubertent,
            s.width, s.length])
    except Shelter.DoesNotExist:
        row.extend([""] * 6)

    return row
Example #3
 def algorithm(self):
     if self.choice == DISCOUNT_NOREFUND:
         return string_concat(_('No refund tariff'), self.quantities)
     elif self.choice == DISCOUNT_EARLY:
         return string_concat(_('Booking, earlier than %s day(days) before arrival') % self.days, self.quantities)
     elif self.choice == DISCOUNT_LATER:
         return string_concat(_('Booking, later than %s day(days) before arrival') % self.days, self.quantities)
     elif self.choice == DISCOUNT_PERIOD:
         return string_concat(_('Booking at least %s day(days)') % self.days, self.quantities)
     elif self.choice == DISCOUNT_PACKAGE:
         return _('Package: booking %(days)s day(days) at price of %(price_days)s day(days)') % \
             dict(days=self.days, price_days=self.at_price_days)
     elif self.choice == DISCOUNT_HOLIDAY:
         return string_concat(_('Booking in holidays/weekend'), self.quantities)
     elif self.choice == DISCOUNT_SPECIAL:
         return string_concat(_('Special discount'), self.quantities)
     elif self.choice == DISCOUNT_LAST_MINUTE:
         return string_concat(_('Booking after standard arrival time, over the time %(time_from)s - %(time_to)s')
                              % dict(time_from=date(self.time_on, 'H:i'), time_to=date(self.time_off, 'H:i')),
                              self.quantities)
     elif self.choice == DISCOUNT_CREDITCARD:
         return string_concat(_('Booking with creditcard'), self.quantities)
     elif self.choice == DISCOUNT_NORMAL:
         return string_concat(_('Simple discount'), self.quantities)
     else:
         return None
Example #4
def pithy_timesince(d, preposition=''):
    '''
        Concise timesince.
        Modified from Pownce pithy_timesince and django timesince.
    '''
    if d is None:
        return None
    chunks = (
      (60 * 60 * 24 * 365, lambda n, d: preposition+defaultfilters.date(d, 'M j')+', '+defaultfilters.date(d, 'Y')), # 1 year+
      (60 * 60 * 24 * 7, lambda n, d: preposition+defaultfilters.date(d, 'M jS')), # 1 week+
      (60 * 60 * 24, lambda n, d: '%d %s' % (n // (60 * 60 * 24), ngettext('day', 'days', n // (60 * 60 * 24))+' ago')), # 1 day+
      (60 * 60, lambda n, d: '%d %s' % (n // (60 * 60), ngettext('hour', 'hours', n // (60 * 60))+' ago')), # 1 hour+
      (60 * 2, lambda n, d: '%d %s' % (n // 60, 'min ago')), # 2 minutes+
      (1, lambda n, d: 'just now!') # under 2 mins ago
    )
    t = time.localtime()
    if d.tzinfo:
        tz = LocalTimezone(d)
    else:
        tz = None
    now = datetime.datetime(t[0], t[1], t[2], t[3], t[4], t[5], tzinfo=tz)
    # ignore microsecond part of 'd' since we removed it from 'now'
    delta = now - (d - datetime.timedelta(0, 0, d.microsecond))
    since = delta.days * 24 * 60 * 60 + delta.seconds

    for i, (seconds, label) in enumerate(chunks):
        count = since // seconds # truncated division
        if count != 0:
            break
    return label(since, d)
Example #5
 def marriage(self):
     if self.marriage_set.all():
         wedding_list = []
         for wedding in self.marriage_set.all():
             wedding_str = u''
             if self.date_or_what(wedding.marriage_date) and wedding.marriage_location:
                 
                 if isinstance(self.date_or_what(wedding.marriage_date), datetime.datetime):
                     wedding_date_str = date(self.date_or_what(wedding.marriage_date), "N j, Y")
                 else:
                     wedding_date_str = self.date_or_what(wedding.marriage_date)
                 
                 wedding_str = u'%s married %s on %s in %s' % (
                     self.pronoun(), 
                     wedding.married, 
                     wedding_date_str,
                     wedding.marriage_location, 
                 )
             elif self.date_or_what(wedding.marriage_date) and not wedding.marriage_location:
                 if isinstance(self.date_or_what(wedding.marriage_date), datetime.datetime):
                     wedding_date_str = date(self.date_or_what(wedding.marriage_date), "N j, Y")
                 else:
                     wedding_date_str = self.date_or_what(wedding.marriage_date)
                 
                 wedding_str = u'%s married %s on %s' % (
                     self.pronoun(), 
                     wedding.married, 
                     wedding_date_str, 
                 )
             elif not self.date_or_what(wedding.marriage_date) and wedding.marriage_location:
                 wedding_str = u'%s married %s in %s' % (
                     self.pronoun(), 
                     wedding.married, 
                     wedding.marriage_location, 
                 )
             elif not self.date_or_what(wedding.marriage_date) and not wedding.marriage_location:
                 wedding_str = u'%s married %s' % (
                     self.pronoun(), 
                     wedding.married, 
                 )
             
             if wedding.spouse_death:
                 # See if the 'spouse_death" is a date (a death) or a string, something else ... 
                 if isinstance(self.date_or_what(wedding.spouse_death), datetime.datetime):
                     wedding_str += u'. %s died %s' % (
                         self.other_gender(self.gender).capitalize(), 
                         date(self.date_or_what(wedding.spouse_death), "N j, Y"), 
                     )
                 else:
                     if self.date_or_what(wedding.spouse_death).count('divorce'):
                         wedding_str += u'. %s' % self.date_or_what(wedding.spouse_death)
                     else:
                         wedding_str += u'. %s died in %s' % (self.other_gender(self.gender).capitalize(), self.date_or_what(wedding.spouse_death))
             wedding_list.append(wedding_str)
         marriage_str = '. '.join(wedding_list)
         if marriage_str and marriage_str[-1] != '.':
             marriage_str += '.'
     else:
         marriage_str = u''
     return marriage_str
Example #6
    def test_days_parameter_successful(self):
        """
        The ``--days`` parameter restricts the logs to those generated within
        the given number of days before today.
        """
        # Change the timestamp of the existing logs in order to test this.
        self.log_1.timestamp = timezone.now() - timedelta(days=30)
        self.log_2.timestamp = timezone.now() - timedelta(days=10)
        self.log_1.save()
        self.log_2.save()

        call_command('report_mailing', days=15)

        self.assertEqual(len(mail.outbox), 1)

        mail_body = mail.outbox[0].body

        # The first log `self.log_1` is NOT present in the email, because it
        # was generated more than 15 days ago.
        self.assertNotIn(
            date(self.log_1.timestamp, settings.DATETIME_FORMAT),
            mail_body
        )

        # The other logs `self.log_2` and `self.log_3` are properly included.
        self.assertIn(
            date(self.log_2.timestamp, settings.DATETIME_FORMAT),
            mail_body
        )
        self.assertIn(
            date(self.log_3.timestamp, settings.DATETIME_FORMAT),
            mail_body
        )
Example #7
 def get_timesince(self):
     delta = (now() - self.creation_date)
     if delta.days <= 1:
         return u'{} ago'.format(timesince(self.creation_date, now()))
     if self.creation_date.year != now().year:
         return date(self.creation_date, 'd F Y')
     return date(self.creation_date, 'd F')
Example #8
    def _send_mail(self, obj, recipients):
        subject = 'Liturgie kerkdienst'
        body = render_to_string('liturgies/mail.html', {'liturgies': [obj], 'single': True})
        template = MailTemplate.objects.filter(template_type=Templates.liturgy).first()
        if template is not None:
            extra_churches = ', '.join(list(obj.other_churches.values_list('name', flat=True)))
            subject, body = template.render({
                'day': date(obj.date, 'l'),
                'liturgy_details': mark_safe(body),
                'extra_churches': '+ {}'.format(extra_churches) if extra_churches else '',
                'datetime': '{}, {}'.format(date(obj.date, 'l j F'), time(obj.service.time, 'H.i')),
                'part_of_day': obj.part_of_day,
            })

        initial = {
            'recipients': recipients,
            'body': body,
            'subject': subject,
            'is_html': True,
        }
        data = initial.copy()
        data['recipients'] = [r.id for r in recipients]
        form = LiturgyMailForm(liturgies=[obj], initial=initial, data=data)
        form.save()
Example #9
File: time.py Project: ChrisHukai/Spirit
def shortnaturaltime(value):
    """
    now, 1s, 1m, 1h, 1 Ene, 1 Ene 2012
    """
    tz = utc if is_aware(value) else None
    now = datetime.now(tz)

    if value > now:  # Future
        return '%(delta)s' % {'delta': defaultfilters.date(value, 'j M \'y')}

    delta = now - value

    if delta.days:
        if defaultfilters.date(now, 'y') == defaultfilters.date(value, 'y'):
            return '%(delta)s' % {'delta': defaultfilters.date(value, 'j M')}

        return '%(delta)s' % {'delta': defaultfilters.date(value, 'j M \'y')}

    if not delta.seconds:
        return _('now')

    count = delta.seconds
    if count < 60:
        return _('%(count)ss') % {'count': count}

    count //= 60
    if count < 60:
        return _('%(count)sm') % {'count': count}

    count //= 60
    return _('%(count)sh') % {'count': count}
Example #10
def csv_redemption(request):
    """ The redemption master data. """
    response = HttpResponse(content_type='text/csv')
    response['Content-Disposition'] = 'attachment; filename="exchanges.csv"'

    start_date = request.GET.get('start', '20150901')
    start_date = convert_to_date(start_date)
    end_date = request.GET.get('end', None)

    exchanged_tickets = Ticket.objects.filter(recreation_site__isnull=False)
    exchanged_tickets = exchanged_tickets.filter(
        redemption_entry__gte=start_date)
    if end_date:
        end_date = datetime.strptime(end_date, '%Y%m%d')
        exchanged_tickets = exchanged_tickets.filter(
            redemption_entry__lte=end_date)
    exchanged_tickets = exchanged_tickets.select_related('recreation_site')
    exchanged_tickets = exchanged_tickets.prefetch_related('additionalredemption_set')

    writer = csv.writer(response)

    # Write the headers
    writer.writerow([
        'pass_record_locator',
        'created',
        'recorded', 
        'zip', 
        'location', 
        'city',
        'state',
        'duplicate',
    ])

    DATE_FORMAT = 'Ymd'
    for ticket in exchanged_tickets:

        duplicates_exist = ticket.additionalredemption_set.count() > 0
        writer.writerow([
            ticket.record_locator,
            defaultfilters.date(ticket.created, DATE_FORMAT),
            defaultfilters.date(ticket.redemption_entry, DATE_FORMAT),
            ticket.zip_code,
            ticket.recreation_site.name,
            ticket.recreation_site.city,
            ticket.recreation_site.state,
            duplicates_exist
        ])

        for ar in ticket.additionalredemption_set.all():
            writer.writerow([
                ticket.record_locator,
                defaultfilters.date(ticket.created, DATE_FORMAT),
                defaultfilters.date(ar.redemption_entry, DATE_FORMAT),
                ticket.zip_code,
                ar.recreation_site.name,
                ar.recreation_site.city, 
                ar.recreation_site.state,
                duplicates_exist
            ])
    return response
Example #11
 def __str__(self):
     date_format = u'l, %s' % settings.DATE_FORMAT
     return ugettext('%(title)s: %(start)s - %(end)s') % {
         'title': self.title,
         'start': date(self.start, date_format),
         'end': date(self.end, date_format),
     }
Example #12
 def test_history_entries_render_order(self):
     candidate = mommy.make('core.Candidate',
                            relatedstudent=mommy.make('core.RelatedStudent'))
     groupcomment = mommy.make('devilry_group.GroupComment',
                               feedback_set__group=candidate.assignment_group,
                               visibility=group_models.GroupComment.VISIBILITY_VISIBLE_TO_EVERYONE)
     history_entry1 = mommy.make('devilry_group.GroupCommentEditHistory', group_comment=groupcomment,
                                 visibility=group_models.GroupComment.VISIBILITY_VISIBLE_TO_EVERYONE,
                                 edited_datetime=timezone.now() - timezone.timedelta(hours=10))
     history_entry2 = mommy.make('devilry_group.GroupCommentEditHistory', group_comment=groupcomment,
                                 visibility=group_models.GroupComment.VISIBILITY_VISIBLE_TO_EVERYONE,
                                 edited_datetime=timezone.now() - timezone.timedelta(hours=14))
     history_entry3 = mommy.make('devilry_group.GroupCommentEditHistory', group_comment=groupcomment,
                                 visibility=group_models.GroupComment.VISIBILITY_VISIBLE_TO_EVERYONE,
                                 edited_datetime=timezone.now() - timezone.timedelta(hours=4))
     mockresponse = self.mock_http200_getrequest_htmls(
         cradmin_role=candidate.assignment_group,
         requestuser=candidate.relatedstudent.user,
         viewkwargs={'group_comment_id': groupcomment.id})
     self.assertEqual(mockresponse.selector.list('.devilry-comment-history-item__title')[0].alltext_normalized,
                      '{}'.format(defaultfilters.date(timezone.localtime(history_entry3.edited_datetime),
                                                      'DATETIME_FORMAT')))
     self.assertEqual(mockresponse.selector.list('.devilry-comment-history-item__title')[1].alltext_normalized,
                      '{}'.format(defaultfilters.date(timezone.localtime(history_entry1.edited_datetime),
                                                      'DATETIME_FORMAT')))
     self.assertEqual(mockresponse.selector.list('.devilry-comment-history-item__title')[2].alltext_normalized,
                      '{}'.format(defaultfilters.date(timezone.localtime(history_entry2.edited_datetime),
                                                      'DATETIME_FORMAT')))
Example #13
    def test_three_groups_after_deadline(self):
        with self.settings(DEVILRY_COMPRESSED_ARCHIVES_DIRECTORY=self.backend_path):
            testassignment = mommy.make_recipe('devilry.apps.core.assignment_activeperiod_start',
                                               short_name='learn-python-basics',
                                               first_deadline=timezone.now() - timezone.timedelta(hours=1))
            testgroup1 = mommy.make('core.AssignmentGroup', parentnode=testassignment)
            testgroup2 = mommy.make('core.AssignmentGroup', parentnode=testassignment)
            testgroup3 = mommy.make('core.AssignmentGroup', parentnode=testassignment)

            # Create user as examiner on all groups.
            testuser = mommy.make(settings.AUTH_USER_MODEL, shortname='thor', fullname='Thor')
            related_examiner = mommy.make('core.RelatedExaminer', user=testuser, period=testassignment.parentnode)
            mommy.make('core.Examiner', relatedexaminer=related_examiner, assignmentgroup=testgroup1)
            mommy.make('core.Examiner', relatedexaminer=related_examiner, assignmentgroup=testgroup2)
            mommy.make('core.Examiner', relatedexaminer=related_examiner, assignmentgroup=testgroup3)

            # Create feedbackset for testgroup1 with commentfiles
            testfeedbackset_group1 = group_mommy.feedbackset_first_attempt_unpublished(group=testgroup1)
            self.__make_comment_file(feedback_set=testfeedbackset_group1, file_name='testfile.txt',
                                     file_content='testcontent group 1')
            mommy.make('core.Candidate', assignment_group=testgroup1, relatedstudent__user__shortname='april')

            # Create feedbackset for testgroup2 with commentfiles
            testfeedbackset_group2 = group_mommy.feedbackset_first_attempt_unpublished(group=testgroup2)
            self.__make_comment_file(feedback_set=testfeedbackset_group2, file_name='testfile.txt',
                                     file_content='testcontent group 2')
            mommy.make('core.Candidate', assignment_group=testgroup2, relatedstudent__user__shortname='dewey')

            # Create feedbackset for testgroup3 with commentfiles
            testfeedbackset_group3 = group_mommy.feedbackset_first_attempt_unpublished(group=testgroup3)
            self.__make_comment_file(feedback_set=testfeedbackset_group3, file_name='testfile.txt',
                                     file_content='testcontent group 3')
            mommy.make('core.Candidate', assignment_group=testgroup3, relatedstudent__user__shortname='huey')

            # run actiongroup
            self._run_actiongroup(name='batchframework_assignment',
                                  task=tasks.AssignmentCompressAction,
                                  context_object=testassignment,
                                  started_by=testuser)

            archive_meta = archivemodels.CompressedArchiveMeta.objects.get(content_object_id=testassignment.id)
            zipfileobject = ZipFile(archive_meta.archive_path)
            path_to_file_group1 = os.path.join('april',
                                               'deadline-{}'.format(defaultfilters.date(
                                                   testfeedbackset_group1.deadline_datetime, 'b.j.Y-H:i')),
                                               'after_deadline_not_part_of_delivery',
                                               'testfile.txt')
            path_to_file_group2 = os.path.join('dewey',
                                               'deadline-{}'.format(defaultfilters.date(
                                                   testfeedbackset_group2.deadline_datetime, 'b.j.Y-H:i')),
                                               'after_deadline_not_part_of_delivery',
                                               'testfile.txt')
            path_to_file_group3 = os.path.join('huey',
                                               'deadline-{}'.format(defaultfilters.date(
                                                   testfeedbackset_group3.deadline_datetime, 'b.j.Y-H:i')),
                                               'after_deadline_not_part_of_delivery',
                                               'testfile.txt')
            self.assertEqual(b'testcontent group 1', zipfileobject.read(path_to_file_group1))
            self.assertEqual(b'testcontent group 2', zipfileobject.read(path_to_file_group2))
            self.assertEqual(b'testcontent group 3', zipfileobject.read(path_to_file_group3))
Example #14
File: date.py Project: Aer-o/zds-site
def date_formatter(value, tooltip, small):
    try:
        value = datetime(value.year, value.month, value.day,
                         value.hour, value.minute, value.second)
    except AttributeError:
        return value
    except ValueError:
        return value

    if getattr(value, 'tzinfo', None):
        now = datetime.now(LocalTimezone(value))
    else:
        now = datetime.now()
    now = now - timedelta(0, 0, now.microsecond)
    if value > now:
        return "Dans le futur"
    else:
        delta = now - value
        # Natural time for today, absolute date after.
        # Reverse if in tooltip
        if (delta.days == 0) != tooltip:
            return naturaltime(value)
        elif small:
            return date(value, 'd/m/y à H\hi')
        else:
            return date(value, 'l d F Y à H\hi')
Example #15
 def __unicode__(self):
     date_format = u"l, %s" % ugettext("DATE_FORMAT")
     return ugettext("%(title)s: %(start)s-%(end)s") % {
         "title": self.title,
         "start": date(self.start, date_format),
         "end": date(self.end, date_format),
     }
Example #16
File: models.py Project: airtonix/Arkestra
    def save(self):
        def slug_is_bad(self):
            if self.slug in [slug.values()[0] for slug in Event.objects.exclude(id=self.id).values("slug")]:
                return True

        if self.slug == "" or slug_is_bad(self):
            self.slug=slugify(self.short_title)
        if slug_is_bad(self):
            suffix = slugify(date(self.start_date, "Y"))
            if not suffix in self.slug:
                self.slug = self.slug + "-" + suffix
                print "adding suffix:", suffix, self.slug
        if slug_is_bad(self):
            suffix = slugify(date(self.start_date, "F"))
            if not suffix in self.slug:
                self.slug = self.slug + "-" + suffix
                print "adding suffix:", suffix, self.slug
        if slug_is_bad(self):
            suffix = slugify(date(self.start_date, "d"))
            if not suffix in self.slug:
                self.slug = self.slug + "-" + suffix
                print "adding suffix:", suffix, self.slug
        while slug_is_bad(self):
            self.slug=self.slug + "-x"
            print "adding suffix:", "-x"
        super(Event, self).save()
Example #17
    def get(self, request, *args, **kwargs):
        try:
            group = models.Group.objects.get(id=kwargs['pk'])
        except ObjectDoesNotExist:
            raise Http404
        articles = group.article_set.all()
        to_json = []
        try:
            for article in articles:
                article_dict = {
                    'pk': article.pk,
                    'code': article.code,
                    'brand': "&nbsp;",
                    'description': article.description,
                    'url': article.get_absolute_url(),
                    'stock': article.stock,
                    'stock_updated': date(article.stock_updated),
                    'unit': article.measure_unit,
                    'enabled': article.enabled,
                    'favorited': article.favorited,
                    'price': article.price,
                    'price_updated': date(article.price_updated),
                }
                try:
                    article_dict.update({'brand': article.brand.name})
                except:
                    pass
                to_json.append(article_dict)
        except:
            raise Http404
        response_data = json.dumps(to_json)
        return HttpResponse(response_data, content_type='application/json')
Example #18
 def get_timesince(self):
     delta = (now() - self.created)
     if delta.days <= 1:
         return '{0} ago'.format(timesince(self.created, now()))
     if self.created.year != now().year:
         return date(self.created, 'd F Y')
     return date(self.created, 'd F')
Example #19
 def get_csv_field_value(self, obj, csv_field, DB_values):
     try: 
         field = obj._meta.get_field(csv_field)
         val = getattr(obj, field.name)
         if self.get_val(val):
             return self.get_val(val)
         
         if field.choices and not DB_values: 
             val = obj._get_FIELD_display(field)
         if callable(val):
             val = val()
         
         if (field.get_internal_type() == "ManyToManyField"):
             val = u", ".join(m2mobj.__unicode__() for m2mobj in val.all())
         if (field.get_internal_type() == "DateTimeField"):
             return defaultfilters.date(val, "G:i j.n.Y")
         if (field.get_internal_type() == "DateField"):    
             return defaultfilters.date(val, "j.n.Y")
         if (field.get_internal_type() ==  "BooleanField"):
             if (val): val = '1'
             else: val = '0'
         if (field.get_internal_type() ==  "TextField"):
             val = strip_tags(val)
         
         # TODO: stripping these for now; according to the documentation they must not be present
         if (val):
             val = unicode(val)
             val = re.sub(r'"', '', val)
         return val
     except FieldDoesNotExist:
         val = getattr(obj, csv_field)()
         if self.get_val(val):
             return self.get_val(val)
         return val
Example #20
def datetime2human(dt, include_time=False, days_limit=7):
    '''Format a datetime object for human consumption'''
    if isinstance(dt, datetime.datetime):
        dt = localtime(dt)
        time = dt.strftime('%H:%M')
    else:
        dt = datetime.datetime(year=dt.year, month=dt.month, day=dt.day,
                tzinfo=get_default_timezone())
        dt = localtime(dt)
        include_time = False
    today = datetime.date.today()
    yesterday = today - datetime.timedelta(days=1)
    subdate = dt.date()
    if subdate == today:
        if include_time:
            return pgettext('humantime', 'today at {0}').format(time)
        else:
            return pgettext('humantime', 'today')
    elif subdate == yesterday:
        if include_time:
            return pgettext('humantime', 'yesterday at {0}').format(time)
        else:
            return pgettext('humantime', 'yesterday')
    else:
        if include_time:
            return date(dt, 'SHORT_DATETIME_FORMAT')
        else:
            return date(dt, 'SHORT_DATE_FORMAT')
Example #21
    def generate(self):
        # Generate the sampling intervals and get dataUsers from db
        xLabelFormat, poolsData, reportData = self.getRangeData()

        graph1 = io.BytesIO()
        graph2 = io.BytesIO()

        # surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, WIDTH, HEIGHT)  # @UndefinedVariable

        # logger.debug('PoolsData: %s', poolsData)

        X = [v[0] for v in poolsData[0]['dataUsers']]
        data = {
            'title': _('Distinct Users'),
            'x': X,
            'xtickFnc': lambda l: filters.date(datetime.datetime.fromtimestamp(X[int(l)]), xLabelFormat) if int(l) >= 0 else '',
            'xlabel': _('Date'),
            'y': [
                {
                    'label': p['name'],
                    'data': [v[1] for v in p['dataUsers']]
                }
            for p in poolsData],
            'ylabel': _('Users')
        }

        graphs.barChart(SIZE, data, graph1)

        X = [v[0] for v in poolsData[0]['dataAccesses']]
        data = {
            'title': _('Accesses'),
            'x': X,
            'xtickFnc': lambda l: filters.date(datetime.datetime.fromtimestamp(X[int(l)]), xLabelFormat) if int(l) >= 0 else '',
            'xlabel': _('Date'),
            'y': [
                {
                    'label': p['name'],
                    'data': [v[1] for v in p['dataAccesses']]
                }
            for p in poolsData],
            'ylabel': _('Accesses')
        }

        graphs.barChart(SIZE, data, graph2)

        # Generate Data for pools, basically joining all pool data

        return self.templateAsPDF(
            'uds/reports/stats/pools-performance.html',
            dct={
                'data': reportData,
                'pools': [i[1] for i in self.getPools()],
                'beginning': self.startDate.date(),
                'ending': self.endDate.date(),
                'intervals': self.samplingPoints.num(),
            },
            header=ugettext('UDS Pools Performance Report'),
            water=ugettext('Pools Performance'),
            images={'graph1': graph1.getvalue(), 'graph2': graph2.getvalue()},
        )
Example #22
    def clean(self):
        """Validate the assignment.

        Always call this before save()! Read about validation here:
        http://docs.djangoproject.com/en/dev/ref/models/instances/#id1

        Raises ValidationError if ``publishing_time`` is not between
        :attr:`Period.start_time` and ``Period.end_time``.
        """
        super(Assignment, self).clean()
        errors = {}
        if self.publishing_time is not None and self.parentnode_id is not None:
            if self.publishing_time < self.parentnode.start_time or self.publishing_time > self.parentnode.end_time:
                errors['publishing_time'] = _("Publishing time must be within %(periodname)s, "
                                              "which lasts from %(start_time)s to %(end_time)s.") % {
                    'periodname': self.parentnode.long_name,
                    'start_time': defaultfilters.date(self.parentnode.start_time,
                                                      'DATETIME_FORMAT'),
                    'end_time': defaultfilters.date(self.parentnode.end_time,
                                                    'DATETIME_FORMAT')
                }
        if self.first_deadline:
            self._clean_first_deadline(errors)
        if self.passing_grade_min_points > self.max_points:
            errors['passing_grade_min_points'] = _('The minimum number of points required to pass must be less than '
                                                   'the maximum number of points possible for the assignment. The '
                                                   'current maximum is %(max_points)s.') % {
                'max_points': self.max_points
            }

        if errors:
            raise ValidationError(errors)
Example #23
 def __unicode__(self):
     date_format = u'l, %s' % ugettext("DATE_FORMAT")
     return ugettext('%(title)s: %(start)s-%(end)s') % {
         'title': self.title,
         'start': date(self.start, date_format),
         'end': date(self.end, date_format),
     }
Example #24
 def save(self):
     if self.price is None:
         self.price = 0.00
     if not self.tweet_scheduled and self.tweet_time:
         from local_settings import BUFFER_CLIENT_ID, BUFFER_CLIENT_SECRET, BUFFER_ACCESS_TOKEN
         from buffpy.api import API
         from buffpy.managers.profiles import Profiles
         api = API(client_id=BUFFER_CLIENT_ID, client_secret=BUFFER_CLIENT_SECRET, access_token=BUFFER_ACCESS_TOKEN)
         twitter = Profiles(api=api).filter(service='twitter')[0]
         if self.custom_tweet:
             text = self.custom_tweet
         else:
             text_time = date(self.start, 'P')
             if self.start.date() == self.tweet_time.date():
                 text_date = 'Tonight'
             elif self.start.date() == self.tweet_time.date() + datetime.timedelta(days=1):
                 text_date = 'Tomorrow'
             else:
                 text_date = date(self.start, 'N j')
             text = ' '.join([self.title, text_time, text_date])
             if len(text) > 140:
                 text = text[:140]
         new_tweet = twitter.updates.new(text=text, when=self.tweet_time.isoformat())
         self.custom_tweet = text
         self.tweet_scheduled = True
     return super(JackpotEvent, self).save()
Example #25
 def dates(self):
   dates = self.event_set.all()[:20].values_list('start_dt', flat=True)
   ret = ', '.join([date(d, settings.DATETIME_FORMAT) for d in dates])
   if self.event_set.all().count() > 20:
     ret += ' ... '
     ret += date(self.event_set.all().reverse()[0].start_dt, settings.DATETIME_FORMAT)
     
   return ret
Example #26
 def get_date(self):
     now = datetime.now()
     if date(now, 'Y/m/d') == date(self.day, 'Y/m/d'):
         return 'Бүгүн'
     elif date(now - timedelta(days=1), 'Y/m/d') == date(self.day, 'Y/m/d'):
         return 'Кечээ'
     else:
         return date(self.day, 'm/d/y')
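
The method above compares strings produced by the date filter to decide between 'today' (Бүгүн), 'yesterday' (Кечээ) and a plain date. A minimal standalone sketch of that comparison trick, with assumed names and inline settings configuration:

import datetime

import django
from django.conf import settings

if not settings.configured:
    settings.configure(USE_TZ=False, USE_I18N=False)
    django.setup()

from django.template.defaultfilters import date


def label_for(day, now=None):
    # Formatting both datetimes with the same pattern and comparing the
    # strings is an easy day-equality check.
    now = now or datetime.datetime.now()
    if date(now, 'Y/m/d') == date(day, 'Y/m/d'):
        return 'today'
    if date(now - datetime.timedelta(days=1), 'Y/m/d') == date(day, 'Y/m/d'):
        return 'yesterday'
    return date(day, 'm/d/y')


print(label_for(datetime.datetime.now()))                               # today
print(label_for(datetime.datetime.now() - datetime.timedelta(days=1)))  # yesterday
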
Example #27
File: date.py Project: sigurdga/nidarholm
def single_datetime(from_date, whole_day=True):
    if whole_day:
        return date(from_date, "DATE_FORMAT")
    else:
        return u"%s, %s" % (
                    date(from_date, "DATE_FORMAT"),
                    time(from_date, "TIME_FORMAT"),
                    )
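
A hedged usage sketch for single_datetime: the function body is copied from the snippet, settings are configured inline for a standalone run, and the expected output assumes Django's default DATE_FORMAT/TIME_FORMAT ('N j, Y' and 'P'):

import datetime

import django
from django.conf import settings

if not settings.configured:
    settings.configure(USE_TZ=False, USE_I18N=False)
    django.setup()

from django.template.defaultfilters import date, time


def single_datetime(from_date, whole_day=True):
    if whole_day:
        return date(from_date, "DATE_FORMAT")
    return u"%s, %s" % (date(from_date, "DATE_FORMAT"),
                        time(from_date, "TIME_FORMAT"))


dt = datetime.datetime(2014, 5, 17, 19, 30)
print(single_datetime(dt))         # e.g. "May 17, 2014"
print(single_datetime(dt, False))  # e.g. "May 17, 2014, 7:30 p.m."
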
Example #28
File: session.py Project: defivelo/db
 def short(self):
     return _('{place} {date}{time}').format(
         date=date(self.day, settings.DATE_FORMAT_SHORT),
         time=(
             '@' + date(self.begin, settings.TIME_FORMAT_SHORT)
             if self.begin else ''
         ),
         place=self.organization.name
         )
Example #29
 def timeframe(self):
     parts = []
     if self.started:
         parts.append(date(self.started, "F Y"))
     if self.present:
         parts.append("Present")
     elif self.ended:
         parts.append(date(self.ended, "F Y"))
     return "-".join(parts)
Example #30
    def default(self, o):
        if isinstance(o, datetime.datetime):
            return defaultfilters.date(o, 'DATETIME_FORMAT')
        elif isinstance(o, datetime.date):
            return defaultfilters.date(o, 'DATE_FORMAT')
        elif isinstance(o, datetime.time):
            return defaultfilters.date(o, 'TIME_FORMAT')

        return super(DateTimeAwareJSONEncoder, self).default(o)
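
A hedged sketch of wiring an encoder like the one above into json.dumps via the cls argument. The class body is copied from the snippet; settings are configured inline here for a standalone run, and the printed value assumes Django's default DATETIME_FORMAT:

import datetime
import json

import django
from django.conf import settings

if not settings.configured:
    settings.configure(USE_TZ=False, USE_I18N=False)
    django.setup()

from django.template import defaultfilters


class DateTimeAwareJSONEncoder(json.JSONEncoder):
    def default(self, o):
        if isinstance(o, datetime.datetime):
            return defaultfilters.date(o, 'DATETIME_FORMAT')
        elif isinstance(o, datetime.date):
            return defaultfilters.date(o, 'DATE_FORMAT')
        elif isinstance(o, datetime.time):
            return defaultfilters.date(o, 'TIME_FORMAT')
        return super(DateTimeAwareJSONEncoder, self).default(o)


print(json.dumps({'created': datetime.datetime(2016, 3, 4, 10, 30)},
                 cls=DateTimeAwareJSONEncoder))
# e.g. {"created": "March 4, 2016, 10:30 a.m."}
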
Example #31
def test_cohort_end(cohort: Cohort, resp):
    dj_assert_contains(resp, date(cohort.end))
Example #32
 def format_data_emissao(self):
     return '%s' % date(self.data_emissao, "d/m/Y")
Example #33
 def format_data_entrega(self):
     return '%s' % date(self.data_entrega, "d/m/Y")
Example #34
 def formatted_observation_date(self):
     # We need to be aware of the timezone, hence the defaultfilter trick
     return defaultfilters.date(self.observation_time, 'Y-m-d')
Example #35
 def test_no_args(self):
     self.assertEqual(date(''), '')
     self.assertEqual(date(None), '')
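
For context, a minimal standalone sketch (not taken from any of the projects above) of calling the date filter directly as a function; settings are configured inline and the expected outputs assume Django's default formats:

import datetime

import django
from django.conf import settings

if not settings.configured:
    settings.configure(USE_TZ=False, USE_I18N=False)
    django.setup()

from django.template.defaultfilters import date

dt = datetime.datetime(2015, 9, 1, 14, 30)
print(date(dt, 'Y-m-d'))        # 2015-09-01
print(date(dt, "j M 'y"))       # 1 Sep '15
print(date(dt, 'DATE_FORMAT'))  # named format, e.g. "Sept. 1, 2015"
print(repr(date(None)))         # '' -- matches the test above
print(repr(date('')))           # ''
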
Example #36
    def get(self, request, namespace=None, report_slug=None):
        try:
            report = Report.objects.get(namespace=namespace, slug=report_slug)
        except:
            raise Http404

        # parse time and localize to user profile timezone
        profile = request.user.userprofile
        timezone = pytz.timezone(profile.timezone)
        now = datetime.datetime.now(timezone)

        # pin the endtime to a round interval if we are set to
        # reload periodically
        minutes = report.reload_minutes
        if minutes:
            trimmed = round_time(dt=now, round_to=60 * minutes, trim=True)
            if now - trimmed > datetime.timedelta(minutes=15):
                now = trimmed
            else:
                now = round_time(dt=now, round_to=60 * report.reload_minutes)

        widget_defs = []

        widget_defs.append({
            'datetime': str(date(now, 'jS F Y H:i:s')),
            'timezone': str(timezone),
        })
        for w in report.widgets().order_by('row', 'col'):
            # get default criteria values for widget
            # and set endtime to now, if applicable
            criteria = ReportCriteria.as_view()(request,
                                                w.section.report.namespace,
                                                w.section.report.slug, w.id)
            widget_criteria = json.loads(criteria.content)
            if 'endtime' in widget_criteria:
                widget_criteria['endtime'] = now.isoformat()

            # setup json definition object
            widget_def = {
                "widgettype": w.widgettype().split("."),
                "posturl": reverse('widget-job-list',
                                   args=(w.section.report.namespace,
                                         w.section.report.slug, w.id)),
                "options": w.uioptions,
                "widgetid": w.id,
                "row": w.row,
                "width": w.width,
                "height": w.height,
                "criteria": widget_criteria
            }

            widget_defs.append(widget_def)

        return HttpResponse(json.dumps(widget_defs))
Example #37
def test_live_classes_datetime(resp_with_classes, recorded_live_classes):
    for live_class in recorded_live_classes:
        dj_assert_contains(resp_with_classes, date(live_class.start))
Example #38
class TestUserEnrollmentApi(UrlResetMixin, MobileAPITestCase,
                            MobileAuthUserTestMixin,
                            MobileCourseAccessTestMixin,
                            MilestonesTestCaseMixin):
    """
    Tests for /api/mobile/v0.5/users/<user_name>/course_enrollments/
    """
    REVERSE_INFO = {'name': 'courseenrollment-detail', 'params': ['username']}
    ALLOW_ACCESS_TO_UNRELEASED_COURSE = True
    ALLOW_ACCESS_TO_MILESTONE_COURSE = True
    ALLOW_ACCESS_TO_NON_VISIBLE_COURSE = True
    NEXT_WEEK = datetime.datetime.now(pytz.UTC) + datetime.timedelta(days=7)
    LAST_WEEK = datetime.datetime.now(pytz.UTC) - datetime.timedelta(days=7)
    ADVERTISED_START = "Spring 2016"
    ENABLED_SIGNALS = ['course_published']
    DATES = {
        'next_week': NEXT_WEEK,
        'last_week': LAST_WEEK,
        'default_start_date': DEFAULT_START_DATE,
    }

    @patch.dict(settings.FEATURES, {"ENABLE_DISCUSSION_SERVICE": True})
    def setUp(self, *args, **kwargs):
        super(TestUserEnrollmentApi, self).setUp()

    def verify_success(self, response):
        """
        Verifies user course enrollment response for success
        """
        super(TestUserEnrollmentApi, self).verify_success(response)
        courses = response.data
        self.assertEqual(len(courses), 1)

        found_course = courses[0]['course']
        self.assertIn('courses/{}/about'.format(self.course.id),
                      found_course['course_about'])
        self.assertIn('course_info/{}/updates'.format(self.course.id),
                      found_course['course_updates'])
        self.assertIn('course_info/{}/handouts'.format(self.course.id),
                      found_course['course_handouts'])
        self.assertIn('video_outlines/courses/{}'.format(self.course.id),
                      found_course['video_outline'])
        self.assertEqual(found_course['id'], unicode(self.course.id))
        self.assertEqual(courses[0]['mode'], CourseMode.DEFAULT_MODE_SLUG)
        self.assertEqual(courses[0]['course']['subscription_id'],
                         self.course.clean_id(padding_char='_'))

        expected_course_image_url = course_image_url(self.course)
        self.assertIsNotNone(expected_course_image_url)
        self.assertIn(expected_course_image_url, found_course['course_image'])
        self.assertIn(expected_course_image_url,
                      found_course['media']['course_image']['uri'])

    def verify_failure(self, response, error_type=None):
        self.assertEqual(response.status_code, 200)
        courses = response.data
        self.assertEqual(len(courses), 0)

    @patch.dict(settings.FEATURES, {'ENABLE_MKTG_SITE': True})
    def test_sort_order(self):
        self.login()

        num_courses = 3
        courses = []
        for course_index in range(num_courses):
            courses.append(CourseFactory.create(mobile_available=True))
            self.enroll(courses[course_index].id)

        # verify courses are returned in the order of enrollment, with most recently enrolled first.
        response = self.api_response()
        for course_index in range(num_courses):
            self.assertEqual(
                response.data[course_index]['course']['id'],
                unicode(courses[num_courses - course_index - 1].id))

    @patch.dict(
        settings.FEATURES, {
            'ENABLE_PREREQUISITE_COURSES': True,
            'DISABLE_START_DATES': False,
            'ENABLE_MKTG_SITE': True,
        })
    def test_courseware_access(self):
        self.login()

        course_with_prereq = CourseFactory.create(start=self.LAST_WEEK,
                                                  mobile_available=True)
        prerequisite_course = CourseFactory.create()
        set_prerequisite_courses(course_with_prereq.id,
                                 [unicode(prerequisite_course.id)])

        # Create list of courses with various expected courseware_access responses and corresponding expected codes
        courses = [
            course_with_prereq,
            CourseFactory.create(start=self.NEXT_WEEK, mobile_available=True),
            CourseFactory.create(visible_to_staff_only=True,
                                 mobile_available=True),
            CourseFactory.create(start=self.LAST_WEEK,
                                 mobile_available=True,
                                 visible_to_staff_only=False),
        ]

        expected_error_codes = [
            MilestoneAccessError().error_code,  # 'unfulfilled_milestones'
            StartDateError(self.NEXT_WEEK).error_code,  # 'course_not_started'
            VisibilityError().error_code,  # 'not_visible_to_user'
            None,
        ]

        # Enroll in all the courses
        for course in courses:
            self.enroll(course.id)

        # Verify courses have the correct response through error code. Last enrolled course is first course in response
        response = self.api_response()
        for course_index in range(len(courses)):
            result = response.data[course_index]['course']['courseware_access']
            self.assertEqual(result['error_code'],
                             expected_error_codes[::-1][course_index])

            if result['error_code'] is not None:
                self.assertFalse(result['has_access'])

    @ddt.data(
        ('next_week', ADVERTISED_START, ADVERTISED_START, "string"),
        ('next_week', None, defaultfilters.date(NEXT_WEEK,
                                                "DATE_FORMAT"), "timestamp"),
        ('next_week', '', defaultfilters.date(NEXT_WEEK,
                                              "DATE_FORMAT"), "timestamp"),
        ('default_start_date', ADVERTISED_START, ADVERTISED_START, "string"),
        ('default_start_date', '', None, "empty"),
        ('default_start_date', None, None, "empty"),
    )
    @ddt.unpack
    @patch.dict(settings.FEATURES, {
        'DISABLE_START_DATES': False,
        'ENABLE_MKTG_SITE': True
    })
    def test_start_type_and_display(self, start, advertised_start,
                                    expected_display, expected_type):
        """
        Tests that the correct start_type and start_display are returned in the
        case the course has not started
        """
        self.login()
        course = CourseFactory.create(start=self.DATES[start],
                                      advertised_start=advertised_start,
                                      mobile_available=True)
        self.enroll(course.id)

        response = self.api_response()
        self.assertEqual(response.data[0]['course']['start_type'],
                         expected_type)
        self.assertEqual(response.data[0]['course']['start_display'],
                         expected_display)

    @patch.dict(settings.FEATURES, {'ENABLE_MKTG_SITE': True})
    def test_no_certificate(self):
        self.login_and_enroll()

        response = self.api_response()
        certificate_data = response.data[0]['certificate']
        self.assertDictEqual(certificate_data, {})

    def verify_pdf_certificate(self):
        """
        Verifies the correct URL is returned in the response
        for PDF certificates.
        """
        self.login_and_enroll()

        certificate_url = "http://test_certificate_url"
        GeneratedCertificateFactory.create(
            user=self.user,
            course_id=self.course.id,
            status=CertificateStatuses.downloadable,
            mode='verified',
            download_url=certificate_url,
        )

        response = self.api_response()
        certificate_data = response.data[0]['certificate']
        self.assertEquals(certificate_data['url'], certificate_url)

    @patch.dict(settings.FEATURES, {
        'CERTIFICATES_HTML_VIEW': False,
        'ENABLE_MKTG_SITE': True
    })
    def test_pdf_certificate_with_html_cert_disabled(self):
        """
        Tests PDF certificates with CERTIFICATES_HTML_VIEW set to False.
        """
        self.verify_pdf_certificate()

    @patch.dict(settings.FEATURES, {
        'CERTIFICATES_HTML_VIEW': True,
        'ENABLE_MKTG_SITE': True
    })
    def test_pdf_certificate_with_html_cert_enabled(self):
        """
        Tests PDF certificates with CERTIFICATES_HTML_VIEW set to True.
        """
        self.verify_pdf_certificate()

    @patch.dict(settings.FEATURES, {
        'CERTIFICATES_HTML_VIEW': True,
        'ENABLE_MKTG_SITE': True
    })
    def test_web_certificate(self):
        CourseMode.objects.create(
            course_id=self.course.id,
            mode_display_name="Honor",
            mode_slug=CourseMode.HONOR,
        )
        self.login_and_enroll()

        self.course.cert_html_view_enabled = True
        self.store.update_item(self.course, self.user.id)

        with mock_passing_grade():
            generate_user_certificates(self.user, self.course.id)

        response = self.api_response()
        certificate_data = response.data[0]['certificate']
        self.assertRegexpMatches(
            certificate_data['url'],
            r'http.*/certificates/user/{user_id}/course/{course_id}'.format(
                user_id=self.user.id,
                course_id=self.course.id,
            ))

    @patch.dict(settings.FEATURES, {
        "ENABLE_DISCUSSION_SERVICE": True,
        'ENABLE_MKTG_SITE': True
    })
    def test_discussion_url(self):
        self.login_and_enroll()

        response = self.api_response()
        response_discussion_url = response.data[0]['course']['discussion_url']  # pylint: disable=E1101
        self.assertIn('/api/discussion/v1/courses/{}'.format(self.course.id),
                      response_discussion_url)

    def test_org_query(self):
        self.login()

        # Create list of courses with various organizations
        courses = [
            CourseFactory.create(org='edX', mobile_available=True),
            CourseFactory.create(org='edX', mobile_available=True),
            CourseFactory.create(org='edX',
                                 mobile_available=True,
                                 visible_to_staff_only=True),
            CourseFactory.create(org='Proversity.org', mobile_available=True),
            CourseFactory.create(org='MITx', mobile_available=True),
            CourseFactory.create(org='HarvardX', mobile_available=True),
        ]

        # Enroll in all the courses
        for course in courses:
            self.enroll(course.id)

        response = self.api_response(data={'org': 'edX'})

        # Test for 3 expected courses
        self.assertEqual(len(response.data), 3)

        # Verify only edX courses are returned
        for entry in response.data:
            self.assertEqual(entry['course']['org'], 'edX')
Example #39
 def __str__(self):
     return ugettext("%(start)s to %(end)s") % {
         'start': date(self.start, django_settings.DATE_FORMAT),
         'end': date(self.end, django_settings.DATE_FORMAT)
     }
Example #40
 def __str__(self):
     return ugettext('%(title)s: %(start)s - %(end)s') % {
         'title': self.title,
         'start': date(self.start, django_settings.DATE_FORMAT),
         'end': date(self.end, django_settings.DATE_FORMAT),
     }
Example #41
 def format_data_movimento(self):
     return '%s' % date(self.data_movimento, "d/m/Y")
Example #42
    def test_nodes_seperated_data(self):
        nodes_timestamps = {
            'failed-node': {
                'catalog':
                ((datetime.utcnow() -
                  timedelta(minutes=11)).strftime('%Y-%m-%dT%H:%M:%S.%fZ')),
                'facts':
                ((datetime.utcnow() -
                  timedelta(minutes=10)).strftime('%Y-%m-%dT%H:%M:%S.%fZ')),
                'report':
                ((datetime.utcnow() -
                  timedelta(minutes=9)).strftime('%Y-%m-%dT%H:%M:%S.%fZ'))
            },
            'missmatch-node1': {
                'catalog':
                ((datetime.utcnow() -
                  timedelta(minutes=11)).strftime('%Y-%m-%dT%H:%M:%S.%fZ')),
                'facts':
                ((datetime.utcnow() -
                  timedelta(minutes=55)).strftime('%Y-%m-%dT%H:%M:%S.%fZ')),
                'report':
                ((datetime.utcnow() -
                  timedelta(minutes=9)).strftime('%Y-%m-%dT%H:%M:%S.%fZ'))
            },
            'missmatch-node2': {
                'catalog':
                ((datetime.utcnow() -
                  timedelta(minutes=11)).strftime('%Y-%m-%dT%H:%M:%S.%fZ')),
                'facts':
                ((datetime.utcnow() -
                  timedelta(minutes=10)).strftime('%Y-%m-%dT%H:%M:%S.%fZ')),
                'report':
                ((datetime.utcnow() -
                  timedelta(minutes=55)).strftime('%Y-%m-%dT%H:%M:%S.%fZ'))
            },
            'missmatch-node3': {
                'catalog':
                ((datetime.utcnow() -
                  timedelta(minutes=50)).strftime('%Y-%m-%dT%H:%M:%S.%fZ')),
                'facts':
                ((datetime.utcnow() -
                  timedelta(minutes=10)).strftime('%Y-%m-%dT%H:%M:%S.%fZ')),
                'report':
                ((datetime.utcnow() -
                  timedelta(minutes=9)).strftime('%Y-%m-%dT%H:%M:%S.%fZ'))
            },
            'unreported-node': {
                'catalog':
                ((datetime.utcnow() -
                  timedelta(minutes=127)).strftime('%Y-%m-%dT%H:%M:%S.%fZ')),
                'facts':
                ((datetime.utcnow() -
                  timedelta(minutes=126)).strftime('%Y-%m-%dT%H:%M:%S.%fZ')),
                'report':
                ((datetime.utcnow() -
                  timedelta(minutes=125)).strftime('%Y-%m-%dT%H:%M:%S.%fZ'))
            },
            'changed-node': {
                'catalog':
                ((datetime.utcnow() -
                  timedelta(minutes=11)).strftime('%Y-%m-%dT%H:%M:%S.%fZ')),
                'facts':
                ((datetime.utcnow() -
                  timedelta(minutes=10)).strftime('%Y-%m-%dT%H:%M:%S.%fZ')),
                'report':
                ((datetime.utcnow() -
                  timedelta(minutes=9)).strftime('%Y-%m-%dT%H:%M:%S.%fZ'))
            },
            'unchanged-node': {
                'catalog':
                ((datetime.utcnow() -
                  timedelta(minutes=25)).strftime('%Y-%m-%dT%H:%M:%S.%fZ')),
                'facts':
                ((datetime.utcnow() -
                  timedelta(minutes=24)).strftime('%Y-%m-%dT%H:%M:%S.%fZ')),
                'report':
                ((datetime.utcnow() -
                  timedelta(minutes=23)).strftime('%Y-%m-%dT%H:%M:%S.%fZ'))
            },
            'pending-node': {
                'catalog':
                ((datetime.utcnow() -
                  timedelta(minutes=16)).strftime('%Y-%m-%dT%H:%M:%S.%fZ')),
                'facts':
                ((datetime.utcnow() -
                  timedelta(minutes=13)).strftime('%Y-%m-%dT%H:%M:%S.%fZ')),
                'report':
                ((datetime.utcnow() -
                  timedelta(minutes=10)).strftime('%Y-%m-%dT%H:%M:%S.%fZ'))
            },
        }

        nodes_data = [{
            'catalog_environment':
            'production',
            'catalog_timestamp':
            nodes_timestamps['failed-node']['catalog'],
            'certname':
            'failed-node.example.com',
            'deactivated':
            None,
            'facts_environment':
            'production',
            'facts_timestamp':
            nodes_timestamps['failed-node']['facts'],
            'report_environment':
            'production',
            'report_timestamp':
            nodes_timestamps['failed-node']['report']
        }, {
            'catalog_environment':
            'production',
            'catalog_timestamp':
            nodes_timestamps['missmatch-node1']['catalog'],
            'certname':
            'missmatch-node1.example.com',
            'deactivated':
            None,
            'facts_environment':
            'production',
            'facts_timestamp':
            nodes_timestamps['missmatch-node1']['facts'],
            'report_environment':
            'production',
            'report_timestamp':
            nodes_timestamps['missmatch-node1']['report']
        }, {
            'catalog_environment':
            'production',
            'catalog_timestamp':
            nodes_timestamps['missmatch-node2']['catalog'],
            'certname':
            'missmatch-node2.example.com',
            'deactivated':
            None,
            'facts_environment':
            'production',
            'facts_timestamp':
            nodes_timestamps['missmatch-node2']['facts'],
            'report_environment':
            'production',
            'report_timestamp':
            nodes_timestamps['missmatch-node2']['report']
        }, {
            'catalog_environment':
            'production',
            'catalog_timestamp':
            nodes_timestamps['missmatch-node3']['catalog'],
            'certname':
            'missmatch-node3.example.com',
            'deactivated':
            None,
            'facts_environment':
            'production',
            'facts_timestamp':
            nodes_timestamps['missmatch-node3']['facts'],
            'report_environment':
            'production',
            'report_timestamp':
            nodes_timestamps['missmatch-node3']['report']
        }, {
            'catalog_environment':
            'production',
            'catalog_timestamp':
            nodes_timestamps['unreported-node']['catalog'],
            'certname':
            'unreported-node.example.com',
            'deactivated':
            None,
            'facts_environment':
            'production',
            'facts_timestamp':
            nodes_timestamps['unreported-node']['facts'],
            'report_environment':
            'production',
            'report_timestamp':
            nodes_timestamps['unreported-node']['report']
        }, {
            'catalog_environment':
            'production',
            'catalog_timestamp':
            nodes_timestamps['changed-node']['catalog'],
            'certname':
            'changed-node.example.com',
            'deactivated':
            None,
            'facts_environment':
            'production',
            'facts_timestamp':
            nodes_timestamps['changed-node']['facts'],
            'report_environment':
            'production',
            'report_timestamp':
            nodes_timestamps['changed-node']['report']
        }, {
            'catalog_environment':
            'production',
            'catalog_timestamp':
            nodes_timestamps['unchanged-node']['catalog'],
            'certname':
            'unchanged-node.example.com',
            'deactivated':
            None,
            'facts_environment':
            'production',
            'facts_timestamp':
            nodes_timestamps['unchanged-node']['facts'],
            'report_environment':
            'production',
            'report_timestamp':
            nodes_timestamps['unchanged-node']['report'],
        }, {
            'catalog_environment':
            'production',
            'catalog_timestamp':
            nodes_timestamps['pending-node']['catalog'],
            'certname':
            'pending-node.example.com',
            'deactivated':
            None,
            'facts_environment':
            'production',
            'facts_timestamp':
            nodes_timestamps['pending-node']['facts'],
            'report_environment':
            'production',
            'report_timestamp':
            nodes_timestamps['pending-node']['report'],
        }]

        events_data = {
            'changed-node.example.com': {
                'failures': 0,
                'noops': 0,
                'skips': 0,
                'subject': {
                    'title': 'changed-node.example.com'
                },
                'subject-type': 'certname',
                'successes': 78
            },
            'pending-node.example.com': {
                'failures': 0,
                'noops': 100,
                'skips': 0,
                'subject': {
                    'title': 'pending-node.example.com'
                },
                'subject-type': 'certname',
                'successes': 0
            },
            'unreported-node.example.com': {
                'failures': 0,
                'noops': 0,
                'skips': 0,
                'subject': {
                    'title': 'unreported-node.example.com'
                },
                'subject-type': 'certname',
                'successes': 0
            },
            'failed-node.example.com': {
                'failures': 20,
                'noops': 0,
                'skips': 10,
                'subject': {
                    'title': 'failed-node.example.com'
                },
                'subject-type': 'certname',
                'successes': 5
            },
            'missmatch-node1.example.com': {
                'failures': 20,
                'noops': 0,
                'skips': 10,
                'subject': {
                    'title': 'missmatch-node1.example.com'
                },
                'subject-type': 'certname',
                'successes': 5
            },
            'missmatch-node2.example.com': {
                'failures': 0,
                'noops': 0,
                'skips': 0,
                'subject': {
                    'title': 'missmatch-node2.example.com'
                },
                'subject-type': 'certname',
                'successes': 25
            },
            'missmatch-node3.example.com': {
                'failures': 0,
                'noops': 50,
                'skips': 0,
                'subject': {
                    'title': 'missmatch-node3.example.com'
                },
                'subject-type': 'certname',
                'successes': 0
            }
        }
        reports_data = {
            'changed-node.example.com': {
                'status': 'changed',
            },
            'pending-node.example.com': {
                'status': 'unchanged',
            },
            'failed-node.example.com': {
                'status': 'failed',
            },
            'unreported-node.example.com': {
                'status': 'unchanged',
            },
            'missmatch-node1.example.com': {
                'status': 'failed',
            },
            'missmatch-node2.example.com': {
                'status': 'changed',
            },
            'missmatch-node3.example.com': {
                'status': 'unchanged',
            }
        }
        failed_list, changed_list, unreported_list, missmatch_list, pending_list = dictstatus(
            nodes_data,
            reports_data,
            events_data,
            sort=False,
            get_status='notall',
            puppet_run_time=60)
        # ('certname', 'latestCatalog', 'latestReport', 'latestFacts', 'success', 'noop', 'failure', 'skipped', 'status')
        failed_expected = [
            ('failed-node.example.com',
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['failed-node']['catalog'])),
                 'Y-m-d H:i:s'),
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['failed-node']['report'])),
                 'Y-m-d H:i:s'),
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['failed-node']['facts'])),
                 'Y-m-d H:i:s'), 5, 0, 20, 10, 'failed'),
            ('missmatch-node1.example.com',
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['missmatch-node1']['catalog'])),
                 'Y-m-d H:i:s'),
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['missmatch-node1']['report'])),
                 'Y-m-d H:i:s'),
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['missmatch-node1']['facts'])),
                 'Y-m-d H:i:s'), 5, 0, 20, 10, 'failed')
        ]

        changed_expected = [
            ('missmatch-node2.example.com',
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['missmatch-node2']['catalog'])),
                 'Y-m-d H:i:s'),
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['missmatch-node2']['report'])),
                 'Y-m-d H:i:s'),
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['missmatch-node2']['facts'])),
                 'Y-m-d H:i:s'), 25, 0, 0, 0, 'changed'),
            ('changed-node.example.com',
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['changed-node']['catalog'])),
                 'Y-m-d H:i:s'),
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['changed-node']['report'])),
                 'Y-m-d H:i:s'),
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['changed-node']['facts'])),
                 'Y-m-d H:i:s'), 78, 0, 0, 0, 'changed'),
        ]

        unreported_expected = [
            ('unreported-node.example.com',
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['unreported-node']['catalog'])),
                 'Y-m-d H:i:s'),
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['unreported-node']['report'])),
                 'Y-m-d H:i:s'),
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['unreported-node']['facts'])),
                 'Y-m-d H:i:s'), 0, 0, 0, 0, 'unchanged')
        ]

        missmatch_expected = [
            ('missmatch-node1.example.com',
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['missmatch-node1']['catalog'])),
                 'Y-m-d H:i:s'),
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['missmatch-node1']['report'])),
                 'Y-m-d H:i:s'),
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['missmatch-node1']['facts'])),
                 'Y-m-d H:i:s'), 5, 0, 20, 10, 'failed'),
            ('missmatch-node2.example.com',
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['missmatch-node2']['catalog'])),
                 'Y-m-d H:i:s'),
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['missmatch-node2']['report'])),
                 'Y-m-d H:i:s'),
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['missmatch-node2']['facts'])),
                 'Y-m-d H:i:s'), 25, 0, 0, 0, 'changed'),
            ('missmatch-node3.example.com',
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['missmatch-node3']['catalog'])),
                 'Y-m-d H:i:s'),
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['missmatch-node3']['report'])),
                 'Y-m-d H:i:s'),
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['missmatch-node3']['facts'])),
                 'Y-m-d H:i:s'), 0, 50, 0, 0, 'pending')
        ]

        pending_expected = [
            ('missmatch-node3.example.com',
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['missmatch-node3']['catalog'])),
                 'Y-m-d H:i:s'),
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['missmatch-node3']['report'])),
                 'Y-m-d H:i:s'),
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['missmatch-node3']['facts'])),
                 'Y-m-d H:i:s'), 0, 50, 0, 0, 'pending'),
            ('pending-node.example.com',
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['pending-node']['catalog'])),
                 'Y-m-d H:i:s'),
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['pending-node']['report'])),
                 'Y-m-d H:i:s'),
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['pending-node']['facts'])),
                 'Y-m-d H:i:s'), 0, 100, 0, 0, 'pending')
        ]

        # Sort the lists so they're easier to verify.
        failed_list.sort(key=lambda tup: tup[0])
        failed_expected.sort(key=lambda tup: tup[0])
        changed_list.sort(key=lambda tup: tup[0])
        changed_expected.sort(key=lambda tup: tup[0])
        unreported_list.sort(key=lambda tup: tup[0])
        unreported_expected.sort(key=lambda tup: tup[0])
        missmatch_list.sort(key=lambda tup: tup[0])
        missmatch_expected.sort(key=lambda tup: tup[0])
        pending_list.sort(key=lambda tup: tup[0])
        pending_expected.sort(key=lambda tup: tup[0])

        self.assertEqual(failed_list, failed_expected,
                         msg='Failed list does not match expectations.')
        self.assertEqual(changed_list, changed_expected,
                         msg='Changed list does not match expectations.')
        self.assertEqual(unreported_list, unreported_expected,
                         msg='Unreported list does not match expectations.')
        self.assertEqual(missmatch_list, missmatch_expected,
                         msg='Mismatch list does not match expectations.')
        self.assertEqual(pending_list, pending_expected,
                         msg='Pending list does not match expectations.')
Example #43
0
 def test_date(self):
     self.assertEqual(date(datetime(2005, 12, 29), "d F Y"),
                      '29 December 2005')
Example #44
0
def test_cohort_start(cohort: Cohort, resp):
    dj_assert_contains(resp, date(cohort.start))
Example #45
0
def test_webnars_datetime(resp_with_webnars, recorded_webinars):
    for webnar in recorded_webinars:
        dj_assert_contains(resp_with_webnars, date(webnar.start))
Example #46
0
def present_dt(dt):
    return '%s at %s' % (defaultfilters.date(
        dt, 'Y-m-d'), defaultfilters.time(dt, 'f A e'))
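A quick usage sketch for present_dt (hypothetical value; assumes an aware datetime and the defaultfilters import shown above):
import datetime
from django.utils import timezone
present_dt(datetime.datetime(2021, 6, 1, 14, 30, tzinfo=timezone.utc))  # -> roughly '2021-06-01 at 2:30 PM UTC'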
Example #47
0
 def _now(self, date_format):
     tzinfo = timezone.get_current_timezone() if settings.USE_TZ else None
     formatted = date(datetime.now(tz=tzinfo), date_format)
     return formatted
Example #48
0
class TestUserEnrollmentApi(UrlResetMixin, MobileAPITestCase, MobileAuthUserTestMixin,
                            MobileCourseAccessTestMixin, MilestonesTestCaseMixin):
    """
    Tests for /api/mobile/{api_version}/users/<user_name>/course_enrollments/
    """
    REVERSE_INFO = {'name': 'courseenrollment-detail', 'params': ['username', 'api_version']}
    ALLOW_ACCESS_TO_UNRELEASED_COURSE = True
    ALLOW_ACCESS_TO_MILESTONE_COURSE = True
    ALLOW_ACCESS_TO_NON_VISIBLE_COURSE = True
    NEXT_WEEK = datetime.datetime.now(pytz.UTC) + datetime.timedelta(days=7)
    LAST_WEEK = datetime.datetime.now(pytz.UTC) - datetime.timedelta(days=7)
    THREE_YEARS_AGO = now() - datetime.timedelta(days=(365 * 3))
    ADVERTISED_START = "Spring 2016"
    ENABLED_SIGNALS = ['course_published']
    DATES = {
        'next_week': NEXT_WEEK,
        'last_week': LAST_WEEK,
        'default_start_date': DEFAULT_START_DATE,
    }

    @patch.dict(settings.FEATURES, {"ENABLE_DISCUSSION_SERVICE": True})
    def setUp(self):
        super(TestUserEnrollmentApi, self).setUp()

    def verify_success(self, response):
        """
        Verifies user course enrollment response for success
        """
        super(TestUserEnrollmentApi, self).verify_success(response)
        courses = response.data
        self.assertEqual(len(courses), 1)

        found_course = courses[0]['course']
        self.assertIn('courses/{}/about'.format(self.course.id), found_course['course_about'])
        self.assertIn('course_info/{}/updates'.format(self.course.id), found_course['course_updates'])
        self.assertIn('course_info/{}/handouts'.format(self.course.id), found_course['course_handouts'])
        self.assertEqual(found_course['id'], six.text_type(self.course.id))
        self.assertEqual(courses[0]['mode'], CourseMode.DEFAULT_MODE_SLUG)
        self.assertEqual(courses[0]['course']['subscription_id'], self.course.clean_id(padding_char='_'))

        expected_course_image_url = course_image_url(self.course)
        self.assertIsNotNone(expected_course_image_url)
        self.assertIn(expected_course_image_url, found_course['course_image'])
        self.assertIn(expected_course_image_url, found_course['media']['course_image']['uri'])

    def verify_failure(self, response, error_type=None):
        self.assertEqual(response.status_code, 200)
        courses = response.data
        self.assertEqual(len(courses), 0)

    @patch.dict(settings.FEATURES, {'ENABLE_MKTG_SITE': True})
    @ddt.data(API_V05, API_V1)
    def test_sort_order(self, api_version):
        self.login()

        num_courses = 3
        courses = []
        for course_index in range(num_courses):
            courses.append(CourseFactory.create(mobile_available=True))
            self.enroll(courses[course_index].id)

        # verify courses are returned in the order of enrollment, with most recently enrolled first.
        response = self.api_response(api_version=api_version)
        for course_index in range(num_courses):
            self.assertEqual(
                response.data[course_index]['course']['id'],
                six.text_type(courses[num_courses - course_index - 1].id)
            )

    @ddt.data(API_V05, API_V1)
    @patch.dict(settings.FEATURES, {
        'ENABLE_PREREQUISITE_COURSES': True,
        'DISABLE_START_DATES': False,
        'ENABLE_MKTG_SITE': True,
    })
    def test_courseware_access(self, api_version):
        self.login()

        course_with_prereq = CourseFactory.create(start=self.LAST_WEEK, mobile_available=True)
        prerequisite_course = CourseFactory.create()
        set_prerequisite_courses(course_with_prereq.id, [six.text_type(prerequisite_course.id)])

        # Create list of courses with various expected courseware_access responses and corresponding expected codes
        courses = [
            course_with_prereq,
            CourseFactory.create(start=self.NEXT_WEEK, mobile_available=True),
            CourseFactory.create(visible_to_staff_only=True, mobile_available=True),
            CourseFactory.create(start=self.LAST_WEEK, mobile_available=True, visible_to_staff_only=False),
        ]

        expected_error_codes = [
            MilestoneAccessError().error_code,  # 'unfulfilled_milestones'
            StartDateError(self.NEXT_WEEK).error_code,  # 'course_not_started'
            VisibilityError().error_code,  # 'not_visible_to_user'
            None,
        ]

        # Enroll in all the courses
        for course in courses:
            self.enroll(course.id)

        # Verify courses have the correct response through error code. Last enrolled course is first course in response
        response = self.api_response(api_version=api_version)
        for course_index in range(len(courses)):
            result = response.data[course_index]['course']['courseware_access']
            self.assertEqual(result['error_code'], expected_error_codes[::-1][course_index])

            if result['error_code'] is not None:
                self.assertFalse(result['has_access'])

    @ddt.data(
        ('next_week', ADVERTISED_START, ADVERTISED_START, "string", API_V05),
        ('next_week', ADVERTISED_START, ADVERTISED_START, "string", API_V1),
        ('next_week', None, defaultfilters.date(NEXT_WEEK, "DATE_FORMAT"), "timestamp", API_V05),
        ('next_week', None, defaultfilters.date(NEXT_WEEK, "DATE_FORMAT"), "timestamp", API_V1),
        ('next_week', '', defaultfilters.date(NEXT_WEEK, "DATE_FORMAT"), "timestamp", API_V05),
        ('next_week', '', defaultfilters.date(NEXT_WEEK, "DATE_FORMAT"), "timestamp", API_V1),
        ('default_start_date', ADVERTISED_START, ADVERTISED_START, "string", API_V05),
        ('default_start_date', ADVERTISED_START, ADVERTISED_START, "string", API_V1),
        ('default_start_date', '', None, "empty", API_V05),
        ('default_start_date', '', None, "empty", API_V1),
        ('default_start_date', None, None, "empty", API_V05),
        ('default_start_date', None, None, "empty", API_V1),
    )
    @ddt.unpack
    @patch.dict(settings.FEATURES, {'DISABLE_START_DATES': False, 'ENABLE_MKTG_SITE': True})
    def test_start_type_and_display(self, start, advertised_start, expected_display, expected_type, api_version):
        """
        Tests that the correct start_type and start_display are returned in the
        case the course has not started
        """
        self.login()
        course = CourseFactory.create(start=self.DATES[start], advertised_start=advertised_start, mobile_available=True)
        self.enroll(course.id)

        response = self.api_response(api_version=api_version)
        self.assertEqual(response.data[0]['course']['start_type'], expected_type)
        self.assertEqual(response.data[0]['course']['start_display'], expected_display)
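    # Illustration only, not code from the API under test: the ddt cases above
    # encode this mapping from (start, advertised_start) to (start_type, start_display):
    #   advertised_start is a non-empty string -> 'string', advertised_start
    #   else start != DEFAULT_START_DATE       -> 'timestamp', defaultfilters.date(start, 'DATE_FORMAT')
    #   otherwise                              -> 'empty', None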

    @ddt.data(API_V05, API_V1)
    @patch.dict(settings.FEATURES, {"ENABLE_DISCUSSION_SERVICE": True, 'ENABLE_MKTG_SITE': True})
    def test_discussion_url(self, api_version):
        self.login_and_enroll()

        response = self.api_response(api_version=api_version)
        response_discussion_url = response.data[0]['course']['discussion_url']
        self.assertIn('/api/discussion/v1/courses/{}'.format(self.course.id), response_discussion_url)

    @ddt.data(API_V05, API_V1)
    def test_org_query(self, api_version):
        self.login()

        # Create list of courses with various organizations
        courses = [
            CourseFactory.create(org='edX', mobile_available=True),
            CourseFactory.create(org='edX', mobile_available=True),
            CourseFactory.create(org='edX', mobile_available=True, visible_to_staff_only=True),
            CourseFactory.create(org='Proversity.org', mobile_available=True),
            CourseFactory.create(org='MITx', mobile_available=True),
            CourseFactory.create(org='HarvardX', mobile_available=True),
        ]

        # Enroll in all the courses
        for course in courses:
            self.enroll(course.id)

        response = self.api_response(data={'org': 'edX'}, api_version=api_version)

        # Test for 3 expected courses
        self.assertEqual(len(response.data), 3)

        # Verify only edX courses are returned
        for entry in response.data:
            self.assertEqual(entry['course']['org'], 'edX')

    def create_enrollment(self, expired):
        """
        Create an enrollment
        """
        if expired:
            course = CourseFactory.create(start=self.THREE_YEARS_AGO, mobile_available=True)
            enrollment = CourseEnrollmentFactory.create(
                user=self.user,
                course_id=course.id
            )
            ScheduleFactory(start=self.THREE_YEARS_AGO + datetime.timedelta(days=1), enrollment=enrollment)
        else:
            course = CourseFactory.create(start=self.LAST_WEEK, mobile_available=True)
            self.enroll(course.id)

        add_course_mode(course, upgrade_deadline_expired=False)

    def _get_enrollment_data(self, api_version, expired):
        self.login()
        self.create_enrollment(expired)
        return self.api_response(api_version=api_version).data

    def _assert_enrollment_results(self, api_version, courses, num_courses_returned, gating_enabled=True):
        self.assertEqual(len(courses), num_courses_returned)

        if api_version == API_V05:
            if num_courses_returned:
                self.assertNotIn('audit_access_expires', courses[0])
        else:
            self.assertIn('audit_access_expires', courses[0])
            if gating_enabled:
                self.assertIsNotNone(courses[0].get('audit_access_expires'))

    @ddt.data(
        (API_V05, True, 0),
        (API_V05, False, 1),
        (API_V1, True, 1),
        (API_V1, False, 1),
    )
    @ddt.unpack
    def test_enrollment_with_gating(self, api_version, expired, num_courses_returned):
        '''
        Test that expired courses are returned only in v1 of the API when the
        waffle flag is enabled, and that unexpired courses are always returned.
        '''
        CourseDurationLimitConfig.objects.create(enabled=True, enabled_as_of=datetime.datetime(2018, 1, 1))
        courses = self._get_enrollment_data(api_version, expired)
        self._assert_enrollment_results(api_version, courses, num_courses_returned, True)

    @ddt.data(
        (API_V05, True, 1),
        (API_V05, False, 1),
        (API_V1, True, 1),
        (API_V1, False, 1),
    )
    @ddt.unpack
    def test_enrollment_no_gating(self, api_version, expired, num_courses_returned):
        '''
        Test that both expired and non-expired courses are returned when the
        waffle flag is disabled, regardless of the API version.
        '''
        CourseDurationLimitConfig.objects.create(enabled=False)
        courses = self._get_enrollment_data(api_version, expired)
        self._assert_enrollment_results(api_version, courses, num_courses_returned, False)
Example #49
0
    def test_nodes_merged_data(self):
        nodes_timestamps = {
            'failed-node': {
                'catalog':
                ((datetime.utcnow() -
                  timedelta(minutes=11)).strftime('%Y-%m-%dT%H:%M:%S.%fZ')),
                'facts':
                ((datetime.utcnow() -
                  timedelta(minutes=10)).strftime('%Y-%m-%dT%H:%M:%S.%fZ')),
                'report':
                ((datetime.utcnow() -
                  timedelta(minutes=9)).strftime('%Y-%m-%dT%H:%M:%S.%fZ'))
            },
            'missmatch-node1': {
                'catalog':
                ((datetime.utcnow() -
                  timedelta(minutes=11)).strftime('%Y-%m-%dT%H:%M:%S.%fZ')),
                'facts':
                ((datetime.utcnow() -
                  timedelta(minutes=55)).strftime('%Y-%m-%dT%H:%M:%S.%fZ')),
                'report':
                ((datetime.utcnow() -
                  timedelta(minutes=9)).strftime('%Y-%m-%dT%H:%M:%S.%fZ'))
            },
            'missmatch-node2': {
                'catalog':
                ((datetime.utcnow() -
                  timedelta(minutes=11)).strftime('%Y-%m-%dT%H:%M:%S.%fZ')),
                'facts':
                ((datetime.utcnow() -
                  timedelta(minutes=10)).strftime('%Y-%m-%dT%H:%M:%S.%fZ')),
                'report':
                ((datetime.utcnow() -
                  timedelta(minutes=55)).strftime('%Y-%m-%dT%H:%M:%S.%fZ'))
            },
            'missmatch-node3': {
                'catalog':
                ((datetime.utcnow() -
                  timedelta(minutes=50)).strftime('%Y-%m-%dT%H:%M:%S.%fZ')),
                'facts':
                ((datetime.utcnow() -
                  timedelta(minutes=10)).strftime('%Y-%m-%dT%H:%M:%S.%fZ')),
                'report':
                ((datetime.utcnow() -
                  timedelta(minutes=9)).strftime('%Y-%m-%dT%H:%M:%S.%fZ'))
            },
            'unreported-node': {
                'catalog':
                ((datetime.utcnow() -
                  timedelta(minutes=127)).strftime('%Y-%m-%dT%H:%M:%S.%fZ')),
                'facts':
                ((datetime.utcnow() -
                  timedelta(minutes=126)).strftime('%Y-%m-%dT%H:%M:%S.%fZ')),
                'report':
                ((datetime.utcnow() -
                  timedelta(minutes=125)).strftime('%Y-%m-%dT%H:%M:%S.%fZ'))
            },
            'changed-node': {
                'catalog':
                ((datetime.utcnow() -
                  timedelta(minutes=11)).strftime('%Y-%m-%dT%H:%M:%S.%fZ')),
                'facts':
                ((datetime.utcnow() -
                  timedelta(minutes=10)).strftime('%Y-%m-%dT%H:%M:%S.%fZ')),
                'report':
                ((datetime.utcnow() -
                  timedelta(minutes=9)).strftime('%Y-%m-%dT%H:%M:%S.%fZ'))
            },
            'unchanged-node': {
                'catalog':
                ((datetime.utcnow() -
                  timedelta(minutes=25)).strftime('%Y-%m-%dT%H:%M:%S.%fZ')),
                'facts':
                ((datetime.utcnow() -
                  timedelta(minutes=24)).strftime('%Y-%m-%dT%H:%M:%S.%fZ')),
                'report':
                ((datetime.utcnow() -
                  timedelta(minutes=23)).strftime('%Y-%m-%dT%H:%M:%S.%fZ'))
            },
            'pending-node': {
                'catalog':
                ((datetime.utcnow() -
                  timedelta(minutes=16)).strftime('%Y-%m-%dT%H:%M:%S.%fZ')),
                'facts':
                ((datetime.utcnow() -
                  timedelta(minutes=13)).strftime('%Y-%m-%dT%H:%M:%S.%fZ')),
                'report':
                ((datetime.utcnow() -
                  timedelta(minutes=10)).strftime('%Y-%m-%dT%H:%M:%S.%fZ'))
            },
        }

        nodes_data = [{
            'catalog_environment':
            'production',
            'catalog_timestamp':
            nodes_timestamps['failed-node']['catalog'],
            'certname':
            'failed-node.example.com',
            'deactivated':
            None,
            'facts_environment':
            'production',
            'facts_timestamp':
            nodes_timestamps['failed-node']['facts'],
            'report_environment':
            'production',
            'report_timestamp':
            nodes_timestamps['failed-node']['report']
        }, {
            'catalog_environment':
            'production',
            'catalog_timestamp':
            nodes_timestamps['missmatch-node1']['catalog'],
            'certname':
            'missmatch-node1.example.com',
            'deactivated':
            None,
            'facts_environment':
            'production',
            'facts_timestamp':
            nodes_timestamps['missmatch-node1']['facts'],
            'report_environment':
            'production',
            'report_timestamp':
            nodes_timestamps['missmatch-node1']['report']
        }, {
            'catalog_environment':
            'production',
            'catalog_timestamp':
            nodes_timestamps['missmatch-node2']['catalog'],
            'certname':
            'missmatch-node2.example.com',
            'deactivated':
            None,
            'facts_environment':
            'production',
            'facts_timestamp':
            nodes_timestamps['missmatch-node2']['facts'],
            'report_environment':
            'production',
            'report_timestamp':
            nodes_timestamps['missmatch-node2']['report']
        }, {
            'catalog_environment':
            'production',
            'catalog_timestamp':
            nodes_timestamps['missmatch-node3']['catalog'],
            'certname':
            'missmatch-node3.example.com',
            'deactivated':
            None,
            'facts_environment':
            'production',
            'facts_timestamp':
            nodes_timestamps['missmatch-node3']['facts'],
            'report_environment':
            'production',
            'report_timestamp':
            nodes_timestamps['missmatch-node3']['report']
        }, {
            'catalog_environment':
            'production',
            'catalog_timestamp':
            nodes_timestamps['unreported-node']['catalog'],
            'certname':
            'unreported-node.example.com',
            'deactivated':
            None,
            'facts_environment':
            'production',
            'facts_timestamp':
            nodes_timestamps['unreported-node']['facts'],
            'report_environment':
            'production',
            'report_timestamp':
            nodes_timestamps['unreported-node']['report']
        }, {
            'catalog_environment':
            'production',
            'catalog_timestamp':
            nodes_timestamps['changed-node']['catalog'],
            'certname':
            'changed-node.example.com',
            'deactivated':
            None,
            'facts_environment':
            'production',
            'facts_timestamp':
            nodes_timestamps['changed-node']['facts'],
            'report_environment':
            'production',
            'report_timestamp':
            nodes_timestamps['changed-node']['report']
        }, {
            'catalog_environment':
            'production',
            'catalog_timestamp':
            nodes_timestamps['unchanged-node']['catalog'],
            'certname':
            'unchanged-node.example.com',
            'deactivated':
            None,
            'facts_environment':
            'production',
            'facts_timestamp':
            nodes_timestamps['unchanged-node']['facts'],
            'report_environment':
            'production',
            'report_timestamp':
            nodes_timestamps['unchanged-node']['report'],
        }, {
            'catalog_environment':
            'production',
            'catalog_timestamp':
            nodes_timestamps['pending-node']['catalog'],
            'certname':
            'pending-node.example.com',
            'deactivated':
            None,
            'facts_environment':
            'production',
            'facts_timestamp':
            nodes_timestamps['pending-node']['facts'],
            'report_environment':
            'production',
            'report_timestamp':
            nodes_timestamps['pending-node']['report'],
        }]

        events_data = {
            'changed-node.example.com': {
                'failures': 0,
                'noops': 0,
                'skips': 0,
                'subject': {
                    'title': 'changed-node.example.com'
                },
                'subject-type': 'certname',
                'successes': 78
            },
            'pending-node.example.com': {
                'failures': 0,
                'noops': 100,
                'skips': 0,
                'subject': {
                    'title': 'pending-node.example.com'
                },
                'subject-type': 'certname',
                'successes': 0
            },
            'failed-node.example.com': {
                'failures': 20,
                'noops': 0,
                'skips': 10,
                'subject': {
                    'title': 'failed-node.example.com'
                },
                'subject-type': 'certname',
                'successes': 5
            },
            'unreported-node.example.com': {
                'failures': 0,
                'noops': 0,
                'skips': 0,
                'subject': {
                    'title': 'unreported-node.example.com'
                },
                'subject-type': 'certname',
                'successes': 0
            },
            'unchanged-node.example.com': {
                'failures': 0,
                'noops': 0,
                'skips': 0,
                'subject': {
                    'title': 'unchanged-node.example.com'
                },
                'subject-type': 'certname',
                'successes': 0
            },
            'missmatch-node1.example.com': {
                'failures': 20,
                'noops': 0,
                'skips': 10,
                'subject': {
                    'title': 'missmatch-node1.example.com'
                },
                'subject-type': 'certname',
                'successes': 5
            },
            'missmatch-node2.example.com': {
                'failures': 0,
                'noops': 0,
                'skips': 0,
                'subject': {
                    'title': 'missmatch-node2.example.com'
                },
                'subject-type': 'certname',
                'successes': 25
            },
            'missmatch-node3.example.com': {
                'failures': 0,
                'noops': 50,
                'skips': 0,
                'subject': {
                    'title': 'missmatch-node3.example.com'
                },
                'subject-type': 'certname',
                'successes': 0
            }
        }

        reports_data = {
            'changed-node.example.com': {
                'status': 'changed',
            },
            'pending-node.example.com': {
                'status': 'pending',
            },
            'failed-node.example.com': {
                'status': 'failed',
            },
            'missmatch-node1.example.com': {
                'status': 'failed',
            },
            'missmatch-node2.example.com': {
                'status': 'changed',
            },
            'missmatch-node3.example.com': {
                'status': 'pending',
            },
            'unreported-node.example.com': {
                'status': 'unchanged',
            },
            'unchanged-node.example.com': {
                'status': 'unchanged',
            }
        }

        merged_list = dictstatus(nodes_data,
                                 reports_data,
                                 events_data,
                                 sort=False,
                                 get_status='all')
        # ('certname', 'latestCatalog', 'latestReport', 'latestFacts', 'success', 'noop', 'failure', 'skipped', 'status')
        merged_expected = [
            ('failed-node.example.com',
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['failed-node']['catalog'])),
                 'Y-m-d H:i:s'),
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['failed-node']['report'])),
                 'Y-m-d H:i:s'),
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['failed-node']['facts'])),
                 'Y-m-d H:i:s'), 5, 0, 20, 10, 'failed'),
            ('missmatch-node1.example.com',
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['missmatch-node1']['catalog'])),
                 'Y-m-d H:i:s'),
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['missmatch-node1']['report'])),
                 'Y-m-d H:i:s'),
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['missmatch-node1']['facts'])),
                 'Y-m-d H:i:s'), 5, 0, 20, 10, 'failed'),
            ('missmatch-node2.example.com',
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['missmatch-node2']['catalog'])),
                 'Y-m-d H:i:s'),
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['missmatch-node2']['report'])),
                 'Y-m-d H:i:s'),
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['missmatch-node2']['facts'])),
                 'Y-m-d H:i:s'), 25, 0, 0, 0, 'changed'),
            ('missmatch-node3.example.com',
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['missmatch-node3']['catalog'])),
                 'Y-m-d H:i:s'),
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['missmatch-node3']['report'])),
                 'Y-m-d H:i:s'),
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['missmatch-node3']['facts'])),
                 'Y-m-d H:i:s'), 0, 50, 0, 0, 'pending'),
            ('unreported-node.example.com',
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['unreported-node']['catalog'])),
                 'Y-m-d H:i:s'),
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['unreported-node']['report'])),
                 'Y-m-d H:i:s'),
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['unreported-node']['facts'])),
                 'Y-m-d H:i:s'), 0, 0, 0, 0, 'unchanged'),
            ('changed-node.example.com',
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['changed-node']['catalog'])),
                 'Y-m-d H:i:s'),
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['changed-node']['report'])),
                 'Y-m-d H:i:s'),
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['changed-node']['facts'])),
                 'Y-m-d H:i:s'), 78, 0, 0, 0, 'changed'),
            ('unchanged-node.example.com',
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['unchanged-node']['catalog'])),
                 'Y-m-d H:i:s'),
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['unchanged-node']['report'])),
                 'Y-m-d H:i:s'),
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['unchanged-node']['facts'])),
                 'Y-m-d H:i:s'), 0, 0, 0, 0, 'unchanged'),
            ('pending-node.example.com',
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['pending-node']['catalog'])),
                 'Y-m-d H:i:s'),
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['pending-node']['report'])),
                 'Y-m-d H:i:s'),
             filters.date(
                 localtime(
                     json_to_datetime(
                         nodes_timestamps['pending-node']['facts'])),
                 'Y-m-d H:i:s'), 0, 100, 0, 0, 'pending')
        ]
        # With get_status='all', dictstatus returns a single merged list; sort it for comparison.
        merged_list.sort(key=lambda tup: tup[0])
        merged_expected.sort(key=lambda tup: tup[0])
        self.assertEqual(merged_list, merged_expected)
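The nine-tuple layout asserted above can be summarized with a small helper. This is only a test-local sketch built from the fixtures in these tests; the helper name and its arguments are assumptions, not part of dictstatus:
def expected_row(name, events, status, nodes_timestamps):
    # Row shape: certname, formatted catalog/report/facts timestamps,
    # then the success/noop/failure/skip counts and the report status.
    def fmt(key):
        return filters.date(
            localtime(json_to_datetime(nodes_timestamps[name][key])),
            'Y-m-d H:i:s')
    return ('%s.example.com' % name, fmt('catalog'), fmt('report'), fmt('facts'),
            events['successes'], events['noops'], events['failures'],
            events['skips'], status)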
Example #50
0
 def get_date_created(self, instance):
     return date(instance.date_created)
Example #51
0
    def post(self, request, namespace=None, report_slug=None):
        # handle REST calls
        if namespace is None or report_slug is None:
            return self.http_method_not_allowed(request)

        logger.debug("Received POST for report %s, with params: %s" %
                     (report_slug, request.POST))

        try:
            report = Report.objects.get(namespace=namespace, slug=report_slug)
        except Report.DoesNotExist:
            raise Http404

        fields_by_section = report.collect_fields_by_section()
        all_fields = SortedDict()
        for section_fields in fields_by_section.values():
            all_fields.update(section_fields)
        form = TableFieldForm(all_fields,
                              hidden_fields=report.hidden_fields,
                              data=request.POST,
                              files=request.FILES)

        if form.is_valid():
            logger.debug('Form passed validation: %s' % form)
            formdata = form.cleaned_data
            logger.debug('Form cleaned data: %s' % formdata)

            # parse time and localize to user profile timezone
            profile = request.user.userprofile
            timezone = pytz.timezone(profile.timezone)
            form.apply_timezone(timezone)

            if formdata['debug']:
                logger.debug("Debugging report and rotating logs now ...")
                management.call_command('rotate_logs')

            logger.debug("Report %s validated form: %s" %
                         (report_slug, formdata))

            # setup definitions for each Widget
            definition = []

            # store datetime info about when report is being run
            # XXX move datetime format to preferences or somesuch
            now = datetime.datetime.now(timezone)
            definition.append({
                'datetime': str(date(now, 'jS F Y H:i:s')),
                'timezone': str(timezone),
                'debug': formdata['debug']
            })

            # create matrix of Widgets
            lastrow = -1
            rows = []
            for w in report.widgets().order_by('row', 'col'):
                if w.row != lastrow:
                    lastrow = w.row
                    rows.append([])
                rows[-1].append(Widget.objects.get_subclass(id=w.id))

            # populate definitions
            for row in rows:
                for w in row:
                    widget_def = {
                        "widgettype":
                        w.widgettype().split("."),
                        "posturl":
                        reverse('widget-job-list',
                                args=(report.namespace, report.slug, w.id)),
                        "options":
                        w.uioptions,
                        "widgetid":
                        w.id,
                        "row":
                        w.row,
                        "width":
                        w.width,
                        "height":
                        w.height,
                        "criteria":
                        w.criteria_from_form(form)
                    }
                    definition.append(widget_def)

            logger.debug("Sending widget definitions for report %s: %s" %
                         (report_slug, definition))

            return HttpResponse(json.dumps(definition))
        else:
            # return the form errors in an HTTP 400 response
            return HttpResponse(str(form.errors), status=400)
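For reference, the JSON list returned by this view starts with one run-metadata entry followed by one entry per widget; a rough sketch of its shape (all values below are illustrative):
# [
#     {'datetime': '1st January 2021 10:00:00', 'timezone': 'UTC', 'debug': False},
#     {'widgettype': ['some.module', 'SomeWidget'], 'posturl': '/.../jobs/',
#      'options': {...}, 'widgetid': 1, 'row': 0, 'width': 6, 'height': 300,
#      'criteria': {...}},
# ]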
Example #52
0
def date(dt, arg=None):
    from django.template.defaultfilters import date

    if not timezone.is_aware(dt):
        dt = dt.replace(tzinfo=timezone.utc)
    return date(dt, arg)
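A brief usage sketch for this wrapper (illustrative value; assumes timezone here is django.utils.timezone, as the body implies):
import datetime
date(datetime.datetime(2020, 5, 17, 9, 0), 'Y-m-d H:i')  # naive input, treated as UTC -> '2020-05-17 09:00'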
Example #53
0
def html_list(list):
    ret = '<ul>'
    for debate in list:
        title, url, created, user = debate
        ret += '<li><a href="%s">%s</a><br />%s, %s</li>' % (
            url, title, user.get_full_name(),
            date(created, "DATE_FORMAT") + " " + time(created, "TIME_FORMAT"))
    ret += '</ul>'
    return ret
Example #54
0
def formatDate(dateStr):
    return date(dateStr, "Y-m-d H:i:s")
Example #55
0
def dateformat(value):
    """Formats a date according to the given date."""
    if value.date() == timezone.now().date():
        return 'Today, {}'.format(time(value, 'TIME_FORMAT'))
    else:
        return date(value, 'MONTH_DAY_FORMAT')
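A usage sketch (assumes Django's default English formats, where TIME_FORMAT is 'P' and MONTH_DAY_FORMAT is 'F j'; exact output depends on the active locale):
from datetime import timedelta
dateformat(timezone.now())                       # e.g. 'Today, 2:30 p.m.'
dateformat(timezone.now() - timedelta(days=40))  # e.g. 'April 21'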
Example #56
0
 def test_escape_characters(self):
     self.assertEqual(date(datetime(2005, 12, 29), r'jS \o\f F'),
                      '29th of December')
Example #57
0
 def get_review_notification_email_sent(self, instance):
     return date(instance.review_notification_email_sent)
Example #58
0
def monthname(n):
    """
    Return the monthname for month # n in current language.
    """
    d = datetime.date(2013, n, 1)
    return defaultfilters.date(d, 'F')
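A usage sketch; with an active English locale the 'F' format returns the full month name:
monthname(3)   # -> 'March'
monthname(12)  # -> 'December'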
Example #59
0
 def get_approval_notification_email_sent(self, instance):
     return date(instance.approval_notification_email_sent)
Example #60
0
def generar_informe(request, informe_de, parametros, tipo):
    """Generar informes sobre algun/a persona/objeto.
    informe_de -> Tipo de informe, 
        puede ser: empleado, cliente, 
        administrador, vehículo, etc.
    parametros -> Valores/restricciones del informe,
        puede ser: Todo, último mes,
        última semana, último año,
        o incluso descartados.
    tipo -> formato de salida del informe,
        puede ser: excel, pdf, csv, word.
        
        El flujo de datos va de CSV a XLSX, 
        luego pasa a Docx y finalmente se 
        convierte en PDF, previo es obligado.
    """

    tablas_db = apps.all_models['Mantenedor']
    informes = {
        # 'NAME_IN_HTML_FORM': tablas_db['ORACLE_TABLE_NAME'],
        'empleado': tablas_db['empleado'],
        'cliente': tablas_db['cliente'],
        'proveedor': tablas_db['proveedor'],
        'administrador': tablas_db['perfil'],
        'vehiculo': tablas_db['infoauto'],
    }

    # Abbreviation, file extension and content-type for each supported format.
    tipos_admitidos = {
        'csv': ['csv', 'text/csv'],
        'excel': [
            'xlsx',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'word': [
            'docx',
            'application/vnd.openxmlformats-officedocument.wordprocessingml.document'
        ],
        'pdf': ['pdf', 'application/pdf'],
    }

    # Activate Spanish localization so dates are rendered in Spanish.
    activate('es')

    # Get the name of the current month in Spanish.
    today = datetime.date.today()
    mes = date(today, 'F')

    # Capture the current date in two formats:
    # one to be embedded in the file names,
    # the other to be written inside the reports.
    now = datetime.datetime.now().strftime('%Y-%m-%d__%H_%M_%S')
    now_ = datetime.datetime.now().strftime(f'%d de {mes} de %Y, %H:%M %p')

    # Normalize the input.
    tipo = tipo.lower()

    # Validate the requested format.
    if tipo not in ['csv', 'excel', 'word', 'pdf']:
        return HttpResponse('ERROR, el tipo de formato no es válido!')

    # Build the file name.
    nombre_archivo = f'informe_{informe_de}_{now}'
    nombre_archivo_con_extension = f'informe_{informe_de}_{now}.{tipos_admitidos[tipo][0]}'

    # Set the response content type and headers.
    response = HttpResponse(
        content_type=f'{tipos_admitidos[tipo][1]}',
        headers={
            'Content-Disposition':
            f'attachment; filename="{nombre_archivo_con_extension}"'
        },
    )

    # Get the column titles of the table.
    fields = informes[informe_de]._meta.get_fields()
    titulos = list()
    for titulo in fields:
        try:
            titulo.field
        except AttributeError:
            titulos.append(titulo.name)

    writer = csv.writer(response)
    writer.writerow(titulos)

    # Fetch the data from the database.
    nombre_campos = informes[informe_de]._meta.get_fields()
    for fila in informes[informe_de].objects.all():
        temp = list()
        for columna in nombre_campos:
            try:
                temp.append(fila.serializable_value(columna.name))
            except AttributeError:
                pass
        writer.writerow(temp)

    # Return a CSV file.
    if tipo == 'csv':
        return response

    # Define the location of the temporary files.
    temp_folder = f'{os.path.realpath(".")}\\__temp\\'
    temp_csv = f'{temp_folder}__temp.csv'

    # Make sure the temporary folder exists.
    if not os.path.exists(temp_folder):
        os.mkdir(temp_folder)

    # Write the CSV to disk.
    with open(temp_csv, 'wb') as temp:
        temp.write(response.content)

    # Return an XLSX file.
    if tipo == 'excel':
        # Pandas reads the CSV from the file.
        archivo_leido = pandas.read_csv(temp_csv)

        # Convert to Excel and store it as an XLSX file.
        archivo_leido.to_excel(f'{temp_folder}{nombre_archivo_con_extension}',
                               index=None,
                               header=True,
                               sheet_name=f'{informe_de}')

        # Return the XLSX file.
        return FileResponse(
            open(f'{temp_folder}{nombre_archivo_con_extension}', 'rb'))

    # Create and fill a DOCX file.
    document = docx.Document()
    document.add_heading(f'Informe de {informe_de}', 0)
    document.add_paragraph(f'Con fecha {now_}.')

    with open(temp_csv, newline='') as f:
        csv_reader = csv.reader(f)
        csv_headers = next(csv_reader)
        csv_cols = len(csv_headers)
        table = document.add_table(rows=2, cols=csv_cols)
        hdr_cells = table.rows[0].cells
        for i in range(csv_cols):
            hdr_cells[i].text = csv_headers[i]

        for row in csv_reader:
            row_cells = table.add_row().cells
            for i in range(csv_cols):
                try:
                    row_cells[i].text = row[i]
                except IndexError:
                    pass
    document.add_page_break()
    document.save(f'{temp_folder}{nombre_archivo}.docx')

    # Return a DOCX file.
    if tipo == 'word':
        return FileResponse(open(f'{temp_folder}{nombre_archivo}.docx', 'rb'))

    if tipo == 'pdf':
        try:
            # Option 1: use MS Office 365 via docx2pdf.
            print('\nUsando Office 365\n')
            docx2pdf.convert(f'__temp\\{nombre_archivo}.docx')
        except Exception:
            import subprocess
            # Option 2: use LibreOffice in headless mode.
            print('\nUsando LibreOffice\n')
            path_to_soffice_exe = r'"C:\Program Files\LibreOffice\program\soffice.exe"'
            to_pdf = '--headless --convert-to pdf'
            outdir = r'--outdir .\__temp'
            res = subprocess.run(
                f'{path_to_soffice_exe} {to_pdf} {outdir} "__temp\\{nombre_archivo}.docx"'
            )
            print(f'\n\n{res}\n\n')
        return FileResponse(open(f'{temp_folder}{nombre_archivo}.pdf', 'rb'))
    else:
        return HttpResponse('Error con el servidor...')
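A hypothetical URL configuration for wiring up this view; the route, converters, and URL name below are assumptions for illustration only:
from django.urls import path

urlpatterns = [
    path('informes/<str:informe_de>/<str:parametros>/<str:tipo>/',
         generar_informe, name='generar_informe'),
]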