Example #1
    def to_dict(self, use_value_dict=False):
        """
        Method: to_dict

        Encodes the whole graph as dictionary.

        Returns:
         {dict} the graph as dictionary
        """
        node_set = self.nodes.filter(deleted=False)
        edge_set = self.edges.filter(deleted=False)
        group_set = self.groups.filter(deleted=False)
        nodes = [node.to_dict(use_value_dict) for node in node_set]
        edges = [edge.to_dict(use_value_dict) for edge in edge_set]
        groups = [group.to_dict(use_value_dict) for group in group_set]

        node_seed = self.nodes.aggregate(Max('client_id'))['client_id__max']
        edge_seed = self.edges.aggregate(Max('client_id'))['client_id__max']
        group_seed = self.groups.aggregate(Max('client_id'))['client_id__max']

        return {
            'id': self.pk,
            'seed': max(node_seed, edge_seed, group_seed),
            'name': self.name,
            'type': self.kind,
            'readOnly': self.read_only,
            'nodes': nodes,
            'edges': edges,
            'nodeGroups': groups
        }
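A caveat for the example above: on an empty graph each Max aggregate returns None, and max(None, None, None) raises a TypeError on Python 3. A minimal guard, sketched here under the assumption of integer client_id fields, wraps the aggregate in Coalesce:

from django.db.models import Max
from django.db.models.functions import Coalesce

def safe_seed(queryset, default=0):
    # Coalesce substitutes `default` when the queryset is empty, so
    # max() over several seeds always compares integers.
    return queryset.aggregate(seed=Coalesce(Max('client_id'), default))['seed']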
Example #2
    def test_aggregations(self):
        for age, birthday in (
            [4, (2007, 12, 25)],
            [4, (2006, 1, 1)],
            [1, (2008, 12, 1)],
            [4, (2006, 6, 1)],
            [12, (1998, 9, 1)],
        ):
            Person.objects.create(age=age, birthday=datetime(*birthday))

        aggregates = Person.objects.aggregate(Min('age'),
                                              Max('age'),
                                              avgage=Avg('age'))
        self.assertEqual(aggregates, {
            'age__min': 1,
            'age__max': 12,
            'avgage': 5.0
        })

        # With filters and testing the sqlaggregates->mongoaggregate
        # conversion.
        aggregates = Person.objects.filter(age__gte=4).aggregate(
            Min('birthday'), Max('birthday'), Avg('age'), Count('id'))
        self.assertEqual(
            aggregates, {
                'birthday__max': datetime(2007, 12, 25, 0, 0),
                'birthday__min': datetime(1998, 9, 1, 0, 0),
                'age__avg': 6.0,
                'id__count': 4,
            })
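As the assertions show, unnamed aggregates get default '<field>__<aggregate>' result keys, while keyword arguments name the key explicitly. The same rule in isolation, using the Person model above:

Person.objects.aggregate(Min('age'))           # {'age__min': 1}
Person.objects.aggregate(youngest=Min('age'))  # {'youngest': 1}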
Example #3
    def handle(self, *args, **options):
        qs = PeerLink.objects.all()
        normalised_count_max = log(
            qs.aggregate(Max('count'))['count__max'] *
            settings.PEER_NORMALISATION_FACTOR)
        site_map = {}

        for site in qs:
            site_map[site.domain] = (
                log(site.count * settings.PEER_NORMALISATION_FACTOR) /
                normalised_count_max) * settings.PEER_FACTOR

        qs = UserLink.objects.all()
        normalised_count_max = log(
            qs.aggregate(Max('count'))['count__max'] *
            settings.USER_NORMALISATION_FACTOR)

        for site in qs:
            site_map[site.domain] += (
                log(site.count * settings.USER_NORMALISATION_FACTOR) /
                normalised_count_max) * settings.USER_FACTOR

        for key, value in site_map.items():
            site_map[key] += settings.OFFSET_FACTOR
            # remove "www." from the start, if any (dot escaped so it does
            # not match an arbitrary character)
            xkey = re.sub(r'^www\.', '', key) + '/*'
            formatted_string = '{domain}\t_cse_e3hycfajgt0\t{factor:1.6f}'.format(
                domain=xkey, factor=value + settings.OFFSET_FACTOR)
            self.stdout.write(self.style.SUCCESS(formatted_string))
        xml_parser = Parser(label='_cse_e3hycfajgt0')
        xml_parser.parse(site_map)
        xml_parser.write_file()
Example #4
def team_rank(team, round_name=None):
    """Returns the rank of the team across all groups."""
    if not round_name:
        round_name = challenge_mgr.get_round_name()

    aggregate = ScoreboardEntry.objects.filter(
        profile__team=team,
        round_name=round_name).aggregate(points=Sum("points"),
                                         last=Max("last_awarded_submission"))

    points = aggregate["points"] or 0
    last_awarded_submission = aggregate["last"]
    # Group by teams, filter out other rounds, and annotate.
    annotated_teams = ScoreboardEntry.objects.values("profile__team").filter(
        round_name=round_name).annotate(
            team_points=Sum("points"),
            last_awarded=Max("last_awarded_submission"))

    count = annotated_teams.filter(team_points__gt=points).count()
    # If there was a submission, tack that on to the count.
    if last_awarded_submission:
        count = count + annotated_teams.filter(
            team_points=points,
            last_awarded_submission__gt=last_awarded_submission).count()

    return count + 1
Example #5
 def define_prices(self):
     return self.aggregate(
         ads_count=Count('id'),
         max_price_uzs=Max('price_uzs'),
         min_price_uzs=Min('price_uzs'),
         avg_price_uzs=Avg('price_uzs'),
         max_price_usd=Max('price_usd'),
         min_price_usd=Min('price_usd'),
         avg_price_usd=Avg('price_usd'),
     )
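Since aggregate() evaluates immediately and returns a plain dict keyed by the aliases above, a call site might look like this sketch (the Ad model is hypothetical, and define_prices is assumed to live on a custom QuerySet):

stats = Ad.objects.all().define_prices()
print(stats['ads_count'], stats['min_price_usd'], stats['max_price_usd'])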
Example #6
 def get_users_with_trial_subscription(self):
     return self.filter(owner__is_active=True).values('owner').annotate(
         sub_count=Count('id'),
         state=Max('state'),
         date=Max('expiration_date')).values(
             'owner__email', 'owner__first_name',
             'owner__last_name').filter(
                 sub_count=1,
                 date__gte=datetime.date.today(),
                 state=SUBSCRIPTION_STATE_TRIAL).distinct()
Example #7
 def get_users_with_trial_subscription_expiring_in(self, days=7):
     return self.all().values('owner').annotate(
         sub_count=Count('id'),
         state=Max('state'),
         date=Max('expiration_date')).values(
             'owner__email', 'owner__first_name',
             'owner__last_name').filter(
                 sub_count=1,
                 date=datetime.date.today() + datetime.timedelta(days),
                 state=SUBSCRIPTION_STATE_TRIAL).distinct().order_by(
                     'owner__email')
Example #8
 def get_users_with_expired_subscription(self):
     return self.filter(owner__is_active=True)\
                .values('owner')\
                .annotate(max_date=Max('expiration_date'),
                          max_state=Max('state'))\
                .values('owner__email',
                        'owner__first_name',
                        'owner__last_name')\
                .filter(max_date__lt=datetime.date.today(),
                        max_state__lt=SUBSCRIPTION_STATE_FREE)\
                .distinct()
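Examples #6 to #8 share one pattern: calling values('owner') before annotate() turns each Max into a per-owner GROUP BY aggregate rather than a global one. A compact sketch of just that grouping step, with a hypothetical Subscription model:

from django.db.models import Max

per_owner = (Subscription.objects
             .values('owner')                        # GROUP BY owner
             .annotate(last=Max('expiration_date'))  # MAX() within each group
             .order_by())                            # clear default ordering so it cannot widen the grouping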
Example #9
 def get_queryset(self, request: HttpRequest) -> QuerySet:
     return super().get_queryset(request).annotate(
         title_ukr=Subquery(self.title_ukr_query[:1]),
         title_eng=Subquery(self.title_eng_query[:1]),
         title_not_ukr=Subquery(self.title_not_ukr_query),
         max_season=Max('posts__seasons__season'),
         max_year=Max('posts__years__year'),
         max_registered_on=Max('posts__raw_post__registered_on'),
     ).order_by('-max_year', '-max_registered_on', 'title_ukr').exclude(
         title_ukr=None,
         title_not_ukr=None,
     )
Example #10
    def stats(cls, events=None):
        '''
        Returns stats on the events queryset provided.

        :param events: A queryset of events, that have the fields sessions, games, players
        '''
        if events is None:
            events = cls.implicit()

        if events:
            # Tailwind's Median aggregator does not work on Durations (PostgreSQL intervals),
            # so we have to convert them to epoch time. Extract is a Django function that can
            # extract 'epoch', the documented way of casting a PostgreSQL interval to epoch time:
            #    https://www.postgresql.org/message-id/19495.1059687790%40sss.pgh.pa.us
            # Django does not document 'epoch', alas, but it works:
            #    https://docs.djangoproject.com/en/4.0/ref/models/database-functions/#extract
            # We need a Django ExpressionWrapper to cast the duration field to DurationField:
            # even though it is a PostgreSQL interval, Django still treats it as a DateTimeField
            # (it is the difference of two DateTimeFields) and fails to recast that difference
            # as a DurationField.
            epoch_duration = Extract(ExpressionWrapper(F('duration'), output_field=DurationField()), lookup_name='epoch')
            epoch_gap = Extract(ExpressionWrapper(F('gap_time'), output_field=DurationField()), lookup_name='epoch')

            result = events.aggregate(Min('sessions'),
                                      Avg('sessions'),
                                      Median('sessions'),
                                      Max('sessions'),
                                      Min('games'),
                                      Avg('games'),
                                      Median('games'),
                                      Max('games'),
                                      Min('players'),
                                      Avg('players'),
                                      Median('players'),
                                      Max('players'),
                                      duration__min=Min('duration'),
                                      duration__avg=Avg('duration'),
                                      duration__median=Median(epoch_duration),
                                      duration__max=Max('duration'),
                                      gap__min=Min('gap_time'),
                                      gap__avg=Avg('gap_time'),
                                      gap__median=Median(epoch_gap),
                                      gap__max=Max('gap_time'))

            # aggregate() is a QuerySet endpoint (i.e. it evaluates the query) and returns a
            # standard dict, so we can cast the epoch times back to Durations for the consumer.
            result['duration__median'] = timedelta(seconds=result['duration__median'])
            result['gap__median'] = timedelta(seconds=result['gap__median'])
        else:
            result = None

        return result
Example #11
def all_notifications(request):

    user_info_obj = User_info.objects.filter(user_id=request.user.id)
    """ wish notifications """
    wish_id_list = Wish.objects.filter(user=user_info_obj).values('id')
    rewish_notify_obj = Feed.objects.filter(wish__in=wish_id_list).filter(
        status='A',
        feed_type__in=('RW', 'RO',
                       'RR')).exclude(user=user_info_obj).order_by("-added")
    """ talk notifications """
    id_list = Talk_comment.objects.filter(user=user_info_obj).values(
        'user', 'talk').annotate(Max('id')).values('id__max')
    talk_notify_obj = Talk_comment.objects.filter(pk__in=id_list).exclude(
        notify_count=0).order_by("-added")
    """ friend requests received """
    invitations = FriendshipInvitation.objects.filter(
        to_user=request.user).order_by("-sent")
    """ friendships accepted """
    accepted_obj = Friendship.objects.filter(
        from_user=request.user).order_by("-added")
    """ talks test """
    talk_list = Talk.objects.filter(user=user_info_obj).values('id')
    comment_list = Talk_comment.objects.filter(talk__in=talk_list).filter(
        user=user_info_obj).values('talk').distinct()
    #my_talk_obj = Talk.objects.filter(pk__in=talk_list).exclude(pk__in=comment_list)
    my_talk_obj = Talk.objects.filter(user=user_info_obj).exclude(
        talk_comment__user=user_info_obj).exclude(
            talk_comment__user=None).order_by('-talk_comment__added')

    if request.method == 'POST':
        if 'wish_read_id' in request.POST:
            """ Make clicked rewish notification as RR(Rewish Read) """
            Feed.objects.filter(id=request.POST['wish_read_id']).update(
                feed_type='RR')
        if 'wishes' in request.POST:
            """ Make all rewish notification as RO(Rewish Observed) on click on Wishes tab """
            Feed.objects.filter(wish__in=wish_id_list).filter(
                feed_type='RW').update(feed_type='RO')
        if 'talks' in request.POST:
            """ Make notify_count of my top comment in all my talks as 0 """
            Talk_comment.objects.filter(user=user_info_obj).values(
                'user', 'talk').annotate(Max('id')).update(notify_count=0)
    return render_to_response('notifications.html', {
        'rewish_notify_obj': rewish_notify_obj,
        'talk_notify_obj': talk_notify_obj,
        'invitations': invitations,
        'accepted_obj': accepted_obj,
        'my_talk_obj': my_talk_obj
    },
                              context_instance=RequestContext(request))
Example #12
    def get_next_few_days_of_tiled_overlays(cls):

        # Pick how many days into the future and past we want to display overlays for.
        # TODO: put in the ISBASE
        next_few_days_of_overlays = Overlay.objects.filter(
            applies_at_datetime__gte=timezone.now()-timedelta(hours=2),
            applies_at_datetime__lte=timezone.now()+timedelta(days=4),
            is_tiled=True,
        )

        next_few_days_of_sst_overlays = next_few_days_of_overlays.filter(definition_id__in=[1, 3])
        next_few_days_of_wave_overlays = next_few_days_of_overlays.filter(definition_id=4)

        # Get the newest overlay for each model type and time. This assumes that, for a given
        # model date, a larger ID indicates a more recently created (and hence more accurate)
        # overlay. Note that a higher ID does NOT by itself indicate a more recent MODEL date,
        # because a datafile's time indexes get plotted asynchronously: tomorrow at 1 PM and
        # tomorrow at 5 PM do not get plotted in that order, but the forecast made two days ago
        # for tomorrow 1 PM will always get plotted before yesterday's forecast for tomorrow 1 PM.
        and_the_newest_for_each_wave = next_few_days_of_wave_overlays.values('definition_id', 'applies_at_datetime')\
            .annotate(newest_id=Max('id'))
        wave_ids = and_the_newest_for_each_wave.values_list('newest_id', flat=True)

        and_the_newest_for_each_sst = next_few_days_of_sst_overlays.values('definition_id', 'applies_at_datetime')\
            .annotate(newest_id=Max('id'))
        sst_ids = and_the_newest_for_each_sst.values_list('newest_id', flat=True)

        # Filter out only the most recent overlay for each type and time
        newest_sst_overlays_to_display = next_few_days_of_sst_overlays.filter(id__in=sst_ids).order_by('definition', 'applies_at_datetime')
        newest_wave_overlays_to_display = next_few_days_of_wave_overlays.filter(id__in=wave_ids).order_by('definition', 'applies_at_datetime')

        wave_dates = newest_wave_overlays_to_display.values_list('applies_at_datetime', flat=True)
        sst_dates = newest_sst_overlays_to_display.values_list('applies_at_datetime', flat=True)

        # Get the distinct dates where there is an SST (and currents) overlay and also a wave overlay
        date_overlap = next_few_days_of_overlays.filter(applies_at_datetime__in=list(sst_dates))\
            .filter(applies_at_datetime__in=list(wave_dates)).values_list('applies_at_datetime', flat=True).distinct()


        # Now get the actual overlays where there is an overlap
        overlapped_sst_items_to_display = newest_sst_overlays_to_display.filter(applies_at_datetime__in=list(date_overlap))
        overlapped_wave_items_to_display = newest_wave_overlays_to_display.filter(applies_at_datetime__in=list(date_overlap))

        # Join the two sets
        all_items_to_display = overlapped_sst_items_to_display | overlapped_wave_items_to_display

        # Send the items back to the SharkEyesCore/views.py file, which preps the main page to be loaded.
        return all_items_to_display
Example #13
 def get_queryset(self):
     queryset = LogGame.objects.all().values('pname').annotate(
         score=Max('score')).order_by('-score')
     pnames = [item['pname'] for item in queryset]
     scores = [item['score'] for item in queryset]
     return LogGame.objects.filter(pname__in=pnames,
                                   score__in=scores).order_by('-score')
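One caveat with the queryset above: pname__in and score__in are independent filters, so one player's name can be matched with another player's best score. A hedged alternative for the same LogGame model pairs each row with its own group maximum via a correlated subquery:

from django.db.models import Max, OuterRef, Subquery

best = (LogGame.objects
        .filter(pname=OuterRef('pname'))
        .values('pname')
        .annotate(top=Max('score'))
        .values('top')[:1])
leaders = LogGame.objects.filter(score=Subquery(best)).order_by('-score')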
Example #14
    def commit_pending(self, reason, request, skip_push=False, force=False):
        """Commit any pending changes."""
        if not force and not self.unit_set.filter(pending=True).exists():
            return False

        self.log_info('committing pending changes (%s)', reason)

        with self.component.repository.lock:
            while True:
                # Find the oldest change; break the loop if none are left
                try:
                    unit = self.unit_set.filter(pending=True).annotate(
                        Max('change__timestamp')).order_by(
                            'change__timestamp__max')[0]
                except IndexError:
                    break

                # Get last change metadata
                author, timestamp = unit.get_last_content_change(request)

                author_name = author.get_author_name()

                # Flush pending units for this author
                self.update_units(author_name, author.id)

                # Commit changes
                self.git_commit(request,
                                author_name,
                                timestamp,
                                skip_push=skip_push)

        # Update stats (the translated flag might have changed)
        self.invalidate_cache()

        return True
Example #15
 def filter_last_counselor_usernames(self, qs, field_name, value):
     qs = qs.annotate(
         max_transfer_date=Max("takeover__transfer_date")).filter(
             takeover__transfer_date=F("max_transfer_date"),
             takeover__counselor__username__in=value,
         )
     return qs
Example #16
    def form_valid(self, form):
        current_parent = self.object.parent
        current_lvl = self.object.lvl
        updated_menu_obj = form.save(commit=False)
        if updated_menu_obj.parent != current_parent:
            if updated_menu_obj.parent.id == updated_menu_obj.id:
                return JsonResponse({
                    'error': True,
                    'message': 'you cannot choose the same menu as parent'
                })
            menu_count = Menu.objects.filter(
                parent=updated_menu_obj.parent).count()
            updated_menu_obj.lvl = menu_count + 1
            menu_max_lvl = Menu.objects.filter(
                parent=current_parent).aggregate(Max('lvl'))['lvl__max']
            if menu_max_lvl != 1:
                for i in Menu.objects.filter(parent=current_parent,
                                             lvl__gt=current_lvl,
                                             lvl__lte=menu_max_lvl):
                    i.lvl = i.lvl - 1
                    i.save()
        if updated_menu_obj.url[-1] != '/':
            updated_menu_obj.url = updated_menu_obj.url + '/'
        updated_menu_obj.save()

        messages.success(self.request, 'Successfully updated menu')
        return JsonResponse({
            'error': False,
            'response': 'Successfully updated menu'
        })
Example #17
 def save(self, commit=True):
     max_index = self.sample.grain_set.aggregate(Max('index'))['index__max']
     if not max_index:
         max_index = 0
     self.instance.index = max_index + 1
     max_w = 0
     max_h = 0
     for f in self.cleaned_data['files']:
         w = f['width']
         if max_w < w:
             max_w = w
         h = f['height']
         if max_h < h:
             max_h = h
     self.instance.image_width = max_w
     self.instance.image_height = max_h
     self.instance.sample = self.sample
      # Default region: a rectangle inset 5% from each image edge
      region = Region(grain=self.instance, shift_x=0, shift_y=0)
     x_margin = int(max_w / 20)
     y_margin = int(max_h / 20)
     v0 = Vertex(region=region, x=x_margin, y=y_margin)
     v1 = Vertex(region=region, x=x_margin, y=max_h - y_margin)
     v2 = Vertex(region=region, x=max_w - x_margin, y=max_h - y_margin)
     v3 = Vertex(region=region, x=max_w - x_margin, y=y_margin)
     inst = super().save(commit)
     if commit:
         region.save()
         v0.save()
         v1.save()
         v2.save()
         v3.save()
     return inst
Example #18
    def rows(self):
        del_data = []
        if 'data_config' in self.config:
            data_config = self.config['data_config']
            delivered = data_config.delivery_data_total.get('received', 0)
            not_delivered = data_config.delivery_data_total.get(
                'not_received', 0)
            del_data.append(
                GroupSummary(title=SupplyPointStatusTypes.DELIVERY_FACILITY,
                             responded=delivered + not_delivered,
                             on_time=delivered + not_delivered,
                             complete=delivered,
                             total=data_config.delivery_data_total.get(
                                 'total', 0)))
            return del_data

        if self.config['org_summary']:
            try:
                data = GroupSummary.objects.filter(
                    title=SupplyPointStatusTypes.DELIVERY_FACILITY,
                    org_summary__in=self.config['org_summary']).aggregate(
                        Avg('responded'), Avg('on_time'), Avg('complete'),
                        Max('total'))

                del_data.append(
                    GroupSummary(
                        title=SupplyPointStatusTypes.DELIVERY_FACILITY,
                        responded=data['responded__avg'],
                        on_time=data['on_time__avg'],
                        complete=data['complete__avg'],
                        total=data['total__max']))
            except GroupSummary.DoesNotExist:
                return del_data
        return del_data
Example #19
File: tests.py Project: zzjeric/django
 def test_object_create_with_aggregate(self):
     # Aggregates are not allowed when inserting new data
     with self.assertRaisesMessage(FieldError, 'Aggregate functions are not allowed in this query'):
         Company.objects.create(
             name='Company', num_employees=Max(Value(1)), num_chairs=1,
             ceo=Employee.objects.create(firstname="Just", lastname="Doit", salary=30),
         )
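A sketch of the allowed alternative: run the aggregate as its own query first, then hand the resulting plain value to create() (the Count aggregate here is illustrative):

from django.db.models import Count

headcount = Employee.objects.aggregate(n=Count('id'))['n']
Company.objects.create(
    name='Company', num_employees=headcount, num_chairs=1,
    ceo=Employee.objects.create(firstname="Just", lastname="Doit", salary=30),
)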
Example #20
def CallDictFunc(request):
    profile_list = Profile.objects.all()
    #print(profile_list)
    for row in profile_list.values_list():
        print(row)

    print(Profile.objects.aggregate(Avg('age')))
    print(Profile.objects.aggregate(Max('age')))
    print(Profile.objects.aggregate(Sum('age')))
    print(Profile.objects.aggregate(Count('age')))
    print(Profile.objects.filter(name='홍길동').aggregate(
        Count('age')))  # filter() supplies the WHERE condition
    # print the number of rows whose name is '홍길동'

    print(len(profile_list))
    # values() + annotate(): what is the average age per group?
    qs = Profile.objects.values('name').annotate(
        Avg('age'))  # group by name and output the average age
    for r in qs:
        print(r)

    # Wrap the results in a list of dicts to send to the client
    pro_list = []

    for pro in profile_list:
        pro_dict = {}
        pro_dict['name'] = pro.name
        pro_dict['age'] = pro.age
        pro_list.append(pro_dict)
        print(pro_list)

    context = {'pro_dicts': pro_list}

    return render(request, 'abc.html', context)
Example #21
    def save(self, *args, **kwargs):
        if self.parent and self.share_of:
            raise ValueError("Can't be both a reply and a share!")
        self.cache_data()

        if self.parent:
            self.content_type = ContentType.REPLY
            # Ensure replies have sane values
            self.visibility = self.parent.visibility
            self.pinned = False
        elif self.share_of:
            self.content_type = ContentType.SHARE

        if not self.pk:
            if not self.guid:
                self.guid = uuid4()
            if self.pinned:
                max_order = Content.objects.top_level().filter(
                    author=self.author).aggregate(Max("order"))["order__max"]
                if max_order is not None:  # If max_order is None, there is likely to be no content yet
                    self.order = max_order + 1

        self.fix_local_uploads()
        super().save(*args, **kwargs)
        self.cache_related_object_data()
Example #22
def hits_per_interval(request, days=1):
    """
	hits per day to facebook.html or
	"""
    context = {}
    rows = []
    logs = Log.objects.order_by('time').all()
    # should include the timezone info in this
    dates = Log.objects.aggregate(Max('time'), Min('time'))
    min_date = dates['time__min'].date()
    max_date = dates['time__max'].date()
    dates = date_range(min_date, max_date, days)

    for from_date, to_date in pair_inter(dates):
        count = Log.objects.filter(
            Q(time__gte=from_date) & Q(time__lt=to_date)
            & (Q(request__startswith='GET /facebook.htm')
               | Q(request__startswith='GET /fb.htm'))).count()
        row = Object()
        row.date = from_date
        row.hits = count
        rows.append(row)

    context['rows'] = rows
    context['use_tabs'] = request.GET.get('use_tabs') in ['1', 'true', 'True']
    return render_to_response('analytics/hits.html', context, mimetype='text')
Example #23
 def max_pk():
     """Returns the sum of all the highest PKs for each submodel."""
     return reduce(
         lambda x, y: x + y,
         # Key by the alias: aggregate() returns {'pk__max': ...}, and
         # dict.values() is not indexable on Python 3.
         [int(p.objects.aggregate(Max('pk'))['pk__max'] or 0)
          for p in AbstractPage.__subclasses__()],
     )
Example #24
 def test_fail_update(self):
     """Window expressions can't be used in an UPDATE statement."""
     msg = 'Window expressions are not allowed in this query'
     with self.assertRaisesMessage(FieldError, msg):
         Employee.objects.filter(department='Management').update(
             salary=Window(expression=Max('salary'), partition_by='department'),
         )
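Window expressions are accepted in annotations, where they stay inside the SELECT; only mutating queries such as UPDATE reject them. A minimal reading counterpart to the test above:

from django.db.models import F, Max, Window

annotated = Employee.objects.annotate(
    department_max=Window(expression=Max('salary'),
                          partition_by=[F('department')]),
)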
Example #25
    def handle(self, *args, **options):
        print()
        print("==========================================================#\n"
              "#                                                         #\n"
              "# EOS Block History Scanner                               #\n"
              "# (C) 2019 Privex Inc.        Released under GNU AGPLv3   #\n"
              "#                                                         #\n"
              "# github.com/Privex/EOSHistory                            #\n"
              "#                                                         #\n"
              "#=========================================================#\n")
        print()
        log.info(
            ' >>> Started SYNC_BLOCKS Django command. Booting up AsyncIO event loop. '
        )
        last_block, start_type = options['start_block'], options['start_type']
        if options['start_block'] is None:
            last_block = settings.EOS_START_BLOCK
            if EOSBlock.objects.count() > 0:
                last_block = EOSBlock.objects.aggregate(
                    Max('number'))['number__max']
                start_type = 'exact'
                log.info(
                    'Found existing blocks. Starting from block %d (changed start_type to exact)',
                    last_block)

        asyncio.run(
            self.sync_blocks(start_block=last_block, start_type=start_type))
Example #26
File: misc.py Project: ahmedaffes/MAGE
def welcome(request):
    latest_setname = {}
    latest_date = {}
    envts = []
    link_title = None
    ck = make_template_fragment_key('welcome_all')
    p = cache.get(ck)
    if p is None:
        link_title = getParam('LINKS_TITLE')
        envts = Environment.objects_active.annotate(latest_reconfiguration=Max(
            'component_instances__configurations__id')).order_by('name')
        for e in envts:
            if e.latest_reconfiguration:
                cic = ComponentInstanceConfiguration.objects.select_related(
                    'result_of__belongs_to_set').get(
                        pk=e.latest_reconfiguration)
                latest_setname[e.name] = cic.result_of.belongs_to_set.name
                latest_date[e.name] = cic.created_on

    return render(
        request, 'ref/welcome.html', {
            'team_links_title': link_title,
            'team_links': Link.objects.all(),
            'latest_setname': latest_setname,
            'latest_date': latest_date,
            'envts': envts,
            'templates': Environment.objects.filter(template_only=True)
        })
Example #27
    def rows(self):
        soh_data = []

        if 'data_config' in self.config:
            data_config = self.config['data_config']
            late = data_config.soh_data_total.get('late', 0)
            on_time = data_config.soh_data_total.get('on_time', 0)
            soh_data.append(
                GroupSummary(title=SupplyPointStatusTypes.SOH_FACILITY,
                             responded=late + on_time,
                             on_time=on_time,
                             complete=late + on_time,
                             total=len(data_config.descendants)))
            return soh_data

        if self.config['org_summary']:
            try:
                sohs = GroupSummary.objects.filter(
                    title=SupplyPointStatusTypes.SOH_FACILITY,
                    org_summary__in=self.config['org_summary']).aggregate(
                        Avg('responded'), Avg('on_time'), Avg('complete'),
                        Max('total'))

                soh_data.append(
                    GroupSummary(title=SupplyPointStatusTypes.SOH_FACILITY,
                                 responded=sohs['responded__avg'],
                                 on_time=sohs['on_time__avg'],
                                 complete=sohs['complete__avg'],
                                 total=sohs['total__max']))
            except GroupSummary.DoesNotExist:
                return soh_data
        return soh_data
Example #28
def add_company_holidays(request):
    if not request.user.is_authenticated:
        return HttpResponseRedirect(reverse('login'))
    user_id = request.user.id
    system_access = True
    system_user = models.Profile.objects.all().filter(user_id=user_id)
    if request.user.username != "system_admin" and not system_user.exists():
        system_access = False
    all_user = models.Profile.objects.all().filter(user_id=user_id,
                                                   access__access_level='All')
    abs_user = models.Profile.objects.all().filter(
        user_id=user_id, access__access_level='Absences')
    if request.user.username != "system_admin" and not all_user.exists(
    ) and not abs_user.exists():
        return HttpResponseRedirect(reverse('home'))
    if 'export_holidays' in request.POST:
        return export_holidays()
    now = datetime.datetime.now()
    upcoming_holidays = models.company_holidays.objects.filter(
        is_finalized=True,
        holiday_date__gte=now).order_by('holiday_date', 'location')
    control_date = models.company_holidays.objects.all().filter(is_finalized=True) \
        .values('location__location') \
        .annotate(max_holiday_date=Max('holiday_date'))

    context = {
        'control_date': control_date,
        'upcoming_holidays': upcoming_holidays,
        'system_access': system_access,
    }
    return render(request, 'absences/add_company_holidays.html', context)
Example #29
    def commit_pending(self, reason, request, skip_push=False):
        """Commit any pending changes."""
        if not self.unit_set.filter(pending=True).exists():
            return False

        self.log_info('committing pending changes (%s)', reason)

        with self.component.repository.lock:
            while True:
                # Find the oldest change; break the loop if none are left
                try:
                    unit = self.unit_set.filter(
                        pending=True,
                        change__action__in=Change.ACTIONS_CONTENT,
                        change__user__isnull=False,
                    ).annotate(Max('change__timestamp')).order_by(
                        'change__timestamp__max')[0]
                except IndexError:
                    break
                # Cannot use get() as more than one change can share the same timestamp
                change = unit.change_set.content().filter(
                    timestamp=unit.change__timestamp__max)[0]

                author_name = change.author.get_author_name()

                # Flush pending units for this author
                self.update_units(author_name, change.author.id)

                # Commit changes
                self.git_commit(request,
                                author_name,
                                change.timestamp,
                                skip_push=skip_push)
        return True
Example #30
    def clean(self):
        are_numar_sfarsit = "numar_sfarsit" in self.cleaned_data and self.cleaned_data['numar_sfarsit']
        if are_numar_sfarsit and self.cleaned_data['numar_sfarsit'] <= self.cleaned_data['numar_inceput']:
            raise ValidationError(u"Numărul de sfârșit poate să fie mai mare decât numărul de început")

        intervale = self.get_intervale()
        interval_deschis = intervale.filter(numar_sfarsit__isnull=True)

        if are_numar_sfarsit:
            # if there is already a register with an open interval
            if interval_deschis.count() and self.cleaned_data["numar_sfarsit"] >= interval_deschis[0].numar_inceput:
                raise ValidationError(u"Number overlap with register #%d" % interval_deschis[0].id)

            range_kwargs = {"numar_inceput__range": (self.cleaned_data['numar_inceput'], self.cleaned_data['numar_sfarsit']),
                            "numar_sfarsit__range": (self.cleaned_data['numar_inceput'], self.cleaned_data['numar_sfarsit'])}
            intervale_suprapuse = intervale.filter(**range_kwargs)
            if intervale_suprapuse.count():
                raise ValidationError(u"Suprapunere de numere cu %d alte registre, te rog să reverifici" % intervale_suprapuse.count())
        else:
            if interval_deschis.count():
                raise ValidationError(u"Nu se pot defini două registre cu numerotare deschisă (fără număr sfârșit) pe aceeași serie")
            else:
                if intervale.count():
                    if self.cleaned_data['numar_inceput'] <= intervale.aggregate(Max("numar_sfarsit")).get("numar_sfarsit__max"):
                        raise ValidationError(u"Suprapunere de numerotare cu un registru!")

        return self.cleaned_data