Example #1
 def __init__(self, year: int, week_number: int):
     self._week = isoweek.Week(year, week_number)
     self.number = week_number
     self.year = year
     self.monday = day.Day(self._week.monday().isoformat())
     self.tuesday = day.Day(self._week.tuesday().isoformat())
     self.wednesday = day.Day(self._week.wednesday().isoformat())
     self.thursday = day.Day(self._week.thursday().isoformat())
     self.friday = day.Day(self._week.friday().isoformat())
     self.saturday = day.Day(self._week.saturday().isoformat())
     self.sunday = day.Day(self._week.sunday().isoformat())
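For reference, a minimal sketch of the isoweek.Week surface the examples in this listing rely on (week construction, weekday accessors, and week arithmetic), assuming only that the isoweek package is installed:

import isoweek

# Build an ISO week from a year and a week number.
w = isoweek.Week(2021, 11)

print(w.monday(), w.sunday())    # first and last day as datetime.date objects
print(w.day(4))                  # Friday (0 = Monday ... 6 = Sunday)
print(w.days()[:5])              # Monday through Friday as a list of dates
print(w.isoformat())             # '2021W11'

# Weeks support integer arithmetic, comparison and a few class methods.
print(w - 1)                                       # the previous week
print(isoweek.Week.last_week_of_year(2021).week)   # 52
print(isoweek.Week.withdate(w.monday()) == w)      # True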
Example #2
def get_week_interval(year, week):
    """
    Get a tuple containing start date and end date for given year and week.
    """
    year = int(year)
    week = int(week)
    if year > datetime.now().year or year < 2010 or week < 1 or week > 52:
        raise WrongDateFormatException
    start = isoweek.Week(year, week).monday()
    end = start + relativedelta.relativedelta(days=7)
    return start, end
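Note that the returned end lands on the following Monday, so the interval is best read as half-open ([start, end)); Example #10 below consumes the same pattern with a strict "<" comparison on the end date. A tiny sketch of the boundary values, assuming isoweek and python-dateutil are installed:

import isoweek
from dateutil import relativedelta

start = isoweek.Week(2021, 11).monday()
end = start + relativedelta.relativedelta(days=7)
print(start, end)   # 2021-03-15 2021-03-22 -- the Monday of week 11 and the Monday after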
Example #3
    def execute(self, *args, **options):
        begin, end = options['begin'], options['end']
        report = ""
        if all([begin, end]):
            begin = util.get_date(begin)
            end = util.get_date(end)
        else:
            if any([begin, end]):
                raise OrderError(
                    'You should specify a begin and an end date !', 'per_user')
            if options['month']:
                report = 'month'
                begin = date(options['year'], options['month'], 1)
                end = date(
                    options['year'], options['month'],
                    calendar.monthrange(options['year'], options['month'])[1])
            else:
                report = "week"
                week = isoweek.Week(options['year'], options['week'])
                begin = week.day(0)
                end = week.day(6)

        if options['team'] is not None:
            try:
                team = conf['teams'][options['team']]
            except KeyError:
                raise OrderError(
                    'Team {0} doesn\'t exist'.format(options['team']),
                    'per_user')
        else:
            team = None

        time_entries = api.get_time_entries(begin, end, team)

        # make dict from list with id as key
        # {3: {'login': '******', 'id': 3, ...}}
        users = dict([(user['id'], user) for user in api.get_users(*team)])

        # initialize dict from the user dict keys (ids) with value 0.0
        per_user = dict.fromkeys(users.keys(), 0.0)
        for time_entry in time_entries:
            per_user[time_entry['user']['id']] += float(time_entry['hours'])

        template = template_env.get_template('default/per_user.temp')
        print(template.render(report=report,
                              week=week,
                              begin=begin,
                              end=end,
                              per_user_items=per_user.items(),
                              total=sum(per_user.values()),
                              users=users))

        return 0
Example #4
def _get_week_days(date, workdays_only):
    """Expand date to the surrounding days in that week.

    Args:
        date: the date which will be expanded into a week.
        workdays_only: whether to only return workdays (Mon - Fri).

    Returns:
        list of date objects that fall in the week of given date.
    """
    week_number = date.isocalendar()[1]
    week = isoweek.Week(date.year, week_number).days()
    return week[:5] if workdays_only else week
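One caveat with this pattern (which a few later examples share): around the turn of the year, the ISO week-numbering year can differ from the calendar year, so pairing date.year with isocalendar()[1] picks the wrong week for those few days; isocalendar()[0] is the safe companion. A minimal illustration, assuming only the standard library and isoweek:

import datetime
import isoweek

d = datetime.date(2014, 12, 29)   # a Monday; d.isocalendar() is (2015, 1, 1)
iso_year, iso_week, _ = d.isocalendar()

print(isoweek.Week(d.year, iso_week).monday())     # 2013-12-30 -- week 1 of 2014, not the week containing d
print(isoweek.Week(iso_year, iso_week).monday())   # 2014-12-29 -- the week that actually contains d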
Example #5
 def __call__(self, weeks, counties):
     res = np.zeros((len(weeks), len(counties), self.num_features), dtype=np.float32)
     for i,week in enumerate(weeks):
         for j,county in enumerate(counties):
             idx = ((isoweek.Week(*week)-self.time_horizon) <= self.data.index)*(self.data.index < week)
             # print("sampling week {} for county {} using data in range {}".format(week, county, idx))
             t_data, x_data = sample_time_and_space(self.data.iloc[idx], self.times_by_week, self.locations_by_county)
             t_pred, x_pred = sample_time_and_space(pd.DataFrame(self.num_tps, index=[week], columns=[county]), self.times_by_week, self.locations_by_county)
             res[i,j,:] = self.ia_bfs(self._to_timestamp(t_pred), x_pred, self._to_timestamp(t_data), x_data)
         frac = (i+1)/len(weeks)
         if self.verbose:
             print("⎹" + "█"*int(np.floor(frac*100)) + " ░▒▓█"[int(((frac*100)%1)*5)] + " "*int(np.ceil((1-frac)*100)) + "⎸ ({:.3}%)".format(100*frac), end="\r", flush=True)
     return res
Example #6
def get_weeks_since(request, **kwargs):
    week = int(kwargs.get('week_id'))
    year = int(kwargs.get('year'))
    end = isoweek.Week(year, week).monday()
    start = request.user.get_last_level_day(week=week, year=year)

    from epilogue.utils import count_weeks
    weeks_since = count_weeks(start, end)

    if weeks_since == 0:
        return 1

    return weeks_since + 1
Example #7
    def get_context_data(self, **kwargs):
        ctx = super().get_context_data()

        week = isoweek.Week(self.year, self.week)
        before = datetime(week.monday().year,
                          week.monday().month,
                          week.monday().day,
                          0,
                          0,
                          0,
                          tzinfo=UTC) - timedelta(days=1)
        after = datetime(week.sunday().year,
                         week.sunday().month,
                         week.sunday().day,
                         0,
                         0,
                         0,
                         tzinfo=UTC) + timedelta(days=1)

        ctx['date'] = week.monday()
        ctx['before'] = before
        ctx['after'] = after

        ebd = self._events_by_day(before, after)

        ctx['has_before'], ctx['has_after'] = has_before_after(
            self.request.organizer.events.filter(
                sales_channels__contains=self.request.sales_channel.identifier
            ),
            SubEvent.objects.filter(event__organizer=self.request.organizer,
                                    event__is_public=True,
                                    event__live=True,
                                    event__sales_channels__contains=self.
                                    request.sales_channel.identifier),
            before,
            after,
        )

        ctx['days'] = days_for_template(ebd, week)
        ctx['weeks'] = [
            (date_fromisocalendar(self.year, i + 1, 1),
             date_fromisocalendar(self.year, i + 1, 7)) for i in range(
                 53 if date(self.year, 12, 31).isocalendar()[1] == 53 else 52)
        ]
        ctx['years'] = range(now().year - 2, now().year + 3)
        ctx['week_format'] = get_format('WEEK_FORMAT')
        if ctx['week_format'] == 'WEEK_FORMAT':
            ctx['week_format'] = WEEK_FORMAT
        ctx['multiple_timezones'] = self._multiple_timezones

        return ctx
Example #8
    def test_to_dict_partition_by_week_not_strict(self):
        results = self.instance.to_dict()
        start_w = isoweek.Week(self.start.year, self.start.isocalendar()[1])
        end_w = isoweek.Week(self.end.year, self.end.isocalendar()[1])
        f_start = datetime.combine(
            start_w.monday(),
            datetime.min.time()).strftime("%Y-%m-%d %H:%M:%S")
        f_end = datetime.combine(end_w.sunday(), datetime.min.time()).replace(
            hour=23, minute=59, second=59,
            microsecond=999999).strftime("%Y-%m-%d %H:%M:%S.%f")

        print(f_start, f_end)
        expected_results = {
            'name':
            'parent_table',
            'partition_prefix':
            'parent_table_y2019_w',
            'partitions': [],
            'catch_all_partition_name':
            'parent_table_catch_all',
            'partitioned_by':
            'week',
            'partition_field':
            'partition_field',
            'field_value':
            f'cast(extract(week from NEW.{self.partition_field}) AS TEXT)',
            'self_check':
            f"NEW.{self.partition_field} >= '{f_start}' "
            f"AND NEW.{self.partition_field} <= '{f_end}' "
        }

        self.assertTrue(len(results.keys()) == len(expected_results.keys()))
        for k, v in results.items():
            if k != 'partitions':
                print(results[k])
                print(expected_results[k])
                self.assertTrue(results[k] == expected_results[k])
Example #9
    def updateHeader(self, firstWeekNumber):
        def week_range(date):
            # TODO process year end week number wrap
            """
            Find the first/last day of the week for the given day.
            Starts Mon ends Sun.

            Returns a tuple of ``(start_date, end_date)``.
            """
            # dow is Mon = 1 ... Sun = 7
            yr, wk, dow = date.isocalendar()

            # find the first day of the week
            if dow == 1:
                start_date = date
            else:
                start_date = date - datetime.timedelta(dow - 1)

            end_date = start_date + datetime.timedelta(4)

            return start_date, end_date

        self._header.clear()
        self._header = self._defaultHeader.copy()
        self._firstWeekNumber = firstWeekNumber  # needed to trigger header redraw

        self._weeksInHeader.clear()

        current_year = datetime.datetime.now().date().isocalendar()[0]
        week = isoweek.Week(current_year, self._firstWeekNumber)
        last_week = week.last_week_of_year(current_year)

        for i in range(self.ColumnCount - 5):
            d1, d2 = week_range(week.monday() + datetime.timedelta(7 * i))
            num = week.year_week()[1] + i
            year = current_year
            if num > last_week.week:
                num = num % last_week.week
                year = year + 1
            self._header.append(f'{num}: {d1:%d.%m} - {d2:%d.%m}')
            self._weeksInHeader.append([year, num, 1])

        self.headerDataChanged.emit(Qt.Horizontal,
                                    self.ColumnCount - self.WeekCount,
                                    self.ColumnCount)
Example #10
def get_week_time_spents(person_id, year, week):
    """
    Return aggregated time spents at task level for given person and week.
    """
    year = int(year)
    week = int(week)
    if year > datetime.datetime.now().year \
       or year < 2010 or week < 1 or week > 52:
        raise WrongDateFormatException

    date = isoweek.Week(year, week).monday()
    next_week = date + relativedelta.relativedelta(days=7)

    entries = get_person_time_spent_entries(
        person_id, TimeSpent.date >= date.strftime("%Y-%m-%d"),
        TimeSpent.date < next_week.strftime("%Y-%m-%d"))

    return build_results(entries)
Example #11
def should_we_download_this(rarname: str, start_month: int, end_month: int,
                            handled_files: set) -> bool:
    """
    Check if we should download this rar files

    we do checks on age
    """
    file_month = None
    # 2018 names
    year = YEAR_RE.findall(rarname)
    week = WEEK_RE.match(rarname)

    if year and week:
        weeknr = int(week.groups()[1])
        w = isoweek.Week(int(year[0]), weeknr)
        d = w.monday()
        file_month = int('%d%02d' % (d.year, d.month))

    else:
        # 2017 names
        m = DATE_RE.findall(rarname)
        if m:
            file_month = int(m[0])

    if not file_month:
        log.debug('date not parsed from file name: %s', rarname)
        return False

    if start_month:
        if file_month < start_month:
            log.debug('skipped %s, too old', rarname)
            return False
    if end_month:
        if file_month >= end_month:
            log.debug('skipped %s, too new', rarname)
            return False

    if rarname in handled_files:
        log.debug('skipped %s file previously added', rarname)
        return False

    return True
Example #12
    def mapper(self, line):
        value = self.get_event_and_date_string(line)

        if value is None:
            return
        event, date_string = value

        username = eventlog.get_event_username(event)

        if not username:
            log.error("Encountered event with no username: %s", event)
            self.incr_counter('Active Users last year', 'Discard Event Missing username', 1)
            return

        date = datetime.date(*[int(x) for x in date_string.split('-')])
        iso_year, iso_weekofyear, _iso_weekday = date.isocalendar()
        week = isoweek.Week(iso_year, iso_weekofyear)
        start_date = week.monday().isoformat()
        end_date = (week.sunday() + datetime.timedelta(1)).isoformat()

        yield (start_date, end_date, username), 1
Example #13
    def buildWeekSelectionCombo(self):
        # TODO if more settings is needed, move all settings-related code to a separate class
        year, week, day = datetime.datetime.now().isocalendar()
        week_list = list()
        for i in range(1, isoweek.Week.last_week_of_year(year).week + 1):
            w = isoweek.Week(year, i)
            week_list.append(
                str(i) + ': ' + str(w.monday().strftime('%d.%m')) + '-' +
                str(w.friday().strftime('%d.%m')))

        self.ui.comboWeek.addItems(week_list)

        # TODO read settings
        if os.path.isfile('settings.ini'):
            with open('settings.ini', mode='tr') as f:
                line = f.readline()
            index = int(line.split('=')[1])
        else:
            index = week

        self.ui.comboWeek.setCurrentIndex(index - 1)
        self._modelBillPlan.updateHeader(index)
Example #14
def check_rating_date():
    """
    Task to sort rating creation date
    Returns:
        rerecalculated (int): how mush ratings have been recalculated
    """
    recalculated = 0
    ratings = Rating.objects.all()
    for rating in ratings:
        week = rating.week
        month = rating.month
        year = rating.year
        date = rating.created
        if week:
            w = isoweek.Week(year=year, week=week)
            date = w.monday()
        if month:
            date = datetime(year=year, month=month, day=1)
        rating.created = date
        rating.save()
        recalculated += 1
    return recalculated
Example #15
    def get_context_data(self, **kwargs):
        ctx = super().get_context_data()

        week = isoweek.Week(self.year, self.week)
        before = datetime(week.monday().year,
                          week.monday().month,
                          week.monday().day,
                          0,
                          0,
                          0,
                          tzinfo=UTC) - timedelta(days=1)
        after = datetime(week.sunday().year,
                         week.sunday().month,
                         week.sunday().day,
                         0,
                         0,
                         0,
                         tzinfo=UTC) + timedelta(days=1)

        ctx['date'] = week.monday()
        ctx['before'] = before
        ctx['after'] = after

        ebd = self._events_by_day(before, after)

        ctx['days'] = days_for_template(ebd, week)
        ctx['weeks'] = [
            (date_fromisocalendar(self.year, i + 1, 1),
             date_fromisocalendar(self.year, i + 1, 7)) for i in range(
                 53 if date(self.year, 12, 31).isocalendar()[1] == 53 else 52)
        ]
        ctx['years'] = range(now().year - 2, now().year + 3)
        ctx['week_format'] = get_format('WEEK_FORMAT')
        if ctx['week_format'] == 'WEEK_FORMAT':
            ctx['week_format'] = WEEK_FORMAT
        ctx['multiple_timezones'] = self._multiple_timezones

        return ctx
Example #16
def end_of_period(period_id):
    assert is_period_id(period_id)
    pid = period_id
    year = int(period_id[0:4])
    if len(pid) > 5:
        part2 = int(pid[5:])  #month, week, quarter
        sep = pid[4]
    else:
        part2 = None
        sep = 'Y'
    if sep == 'W':
        import isoweek
        return isoweek.Week(year, part2).sunday()
    elif sep == 'M':
        return end_of_month(date(year, part2, 1))
    elif sep == 'Q':
        return end_of_quarter(date(year, (part2 * 3), 1))
    elif sep == 'H':
        return end_of_half(date(year, (part2 * 6), 1))
    elif sep == 'Y':
        return date(year, 12, 31)
    else:
        raise ValueError("Unexpected date identifier %s" % period_id)
Example #17
 def add(self, userid, isoYear, isoWeek):
     """@note: This is the place to execute the consistency checks to the
     best of the capabilites provided by the storage."""
     week = isoweek.Week(isoYear, isoWeek)
     key = week.isoformat()
     flex, vacation = getDelta(self.dayInstance, userid, week)
     prevInDb = (DbWeeklyReport.query(
         ancestor=pkstr_key(userid)).order(-DbWeeklyReport.key).get())
     prevWeek = week - 1
     if prevInDb:
         if not (prevInDb.year == prevWeek.year
                 and prevInDb.week == prevWeek.week):
             raise ConsistencyError('locked weeks must be continuous')
         flex += prevInDb.flex
         vacation += prevInDb.vacation
     report = DbWeeklyReport(parent=pkstr_key(userid),
                             id=key,
                             year=isoYear,
                             week=isoWeek,
                             flex=flex,
                             vacation=vacation)
     report.put()
     return {}
Example #18
    def get_todo_count(self, merchants, date, period=PERIOD_MONTH):
        """Method to calculate to-do count.

        Args:
          merchant (object): merchant to calculate to-do for.
          date (datetime): date to find period for to-do calculations.

        Returns:
          total (int): to-do overall.
          solved (int): solved to-do.

        """
        if period == PERIOD_WEEK:
            week = isoweek.Week(date.year, date.isocalendar()[1])
            first = week.monday()
            last = week.sunday()
        else:
            first = timezone.datetime(date.year, date.month, day=1)
            last = timezone.datetime(date.year, date.month,
                                     monthrange(date.year, date.month)[1])

        merchants_values = merchants.values_list('id', 'sources')
        merchants_ids = [i[0] for i in merchants_values]
        merchants_sources = set(
            chain.from_iterable([i[1] for i in merchants_values]))
        todo_set = self.filter(
            mention__merchant_id__in=merchants_ids,
            mention__origin_site__in=merchants_sources,
            created__gte=first,
            created__lte=last,
        ).exclude(mention__status=STATUS_FLAGGED)
        total = todo_set.count()
        solved = todo_set.filter(is_closed=True).count()
        todo_count = namedtuple('todo_count', ('total', 'solved'))

        return todo_count(total=total, solved=solved)
Example #19
def start_of_period(period_id):
    pid = period_id
    assert is_period_id(pid)
    year = int(pid[0:4])
    if len(pid) > 5:
        part2 = int(pid[5:])  #month, week, quarter
        sep = pid[4]
    else:
        part2 = None
        sep = 'Y'
    if sep == 'W':
        import isoweek
        return isoweek.Week(year, part2).monday()
    elif sep == 'M':
        return date(year, part2, 1)
    elif sep == 'Q':
        month_id = (part2 - 1) * 3 + 1
        return date(year, month_id, 1)
    elif sep == 'H':
        return start_of_half(date(year, (part2 * 6), 1))
    elif sep == 'Y':
        return date(year, 1, 1)
    else:
        raise ValueError("Unexpected date identifier %s" % pid)
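For orientation, the period ids parsed in Examples #16 and #19 follow a four-digit year plus an optional separator and index; the 'W' branch resolves to the Monday and Sunday of that ISO week. A small self-contained sketch of the shapes involved (the is_period_id / end_of_month / end_of_quarter / end_of_half helpers are assumed to exist elsewhere and are not shown here):

from datetime import date
import isoweek

# Period-id shapes the parsers above expect (illustrative, not exhaustive):
#   "2021"      -> whole year            (implied sep 'Y')
#   "2021W11"   -> ISO week 11           (sep 'W')
#   "2021M03"   -> March                 (sep 'M')
#   "2021Q2"    -> second quarter        (sep 'Q')
#   "2021H1"    -> first half            (sep 'H')

# What the 'W' branch evaluates to for "2021W11":
week = isoweek.Week(2021, 11)
assert week.monday() == date(2021, 3, 15)   # start_of_period("2021W11")
assert week.sunday() == date(2021, 3, 21)   # end_of_period("2021W11")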
Example #20
 def monday(self):
     return isoweek.Week(self.year, self.week_id)
Example #21
    def _get_event_list(self, request, **kwargs):
        data = {}
        o = getattr(request, 'event', request.organizer)
        list_type = self.request.GET.get("style", o.settings.event_list_type)
        data['list_type'] = list_type

        if hasattr(self.request,
                   'event') and data['list_type'] not in ("calendar", "week"):
            # only allow list-view of more than 50 subevents if ordering is by date, as this can be done in the database
            # ordering by name is currently not supported in the database due to I18NField-JSON
            ordering = self.request.event.settings.get(
                'frontpage_subevent_ordering',
                default='date_ascending',
                as_type=str)
            if ordering not in ("date_ascending", "date_descending"
                                ) and self.request.event.subevents.filter(
                                    date_from__gt=now()).count() > 50:
                if self.request.event.settings.event_list_type not in (
                        "calendar", "week"):
                    self.request.event.settings.event_list_type = "calendar"
                data['list_type'] = list_type = 'calendar'

        if hasattr(self.request, 'event'):
            data['name'] = str(request.event.name)
            data['frontpage_text'] = str(
                rich_text(request.event.settings.frontpage_text,
                          safelinks=False))

        cache_key = ':'.join([
            'widget.py',
            'eventlist',
            request.organizer.slug,
            request.event.slug if hasattr(request, 'event') else '-',
            list_type,
            request.GET.urlencode(),
            get_language(),
        ])
        cached_data = cache.get(cache_key)
        if cached_data:
            return self.response(cached_data)

        if list_type == "calendar":
            self._set_month_year()
            _, ndays = calendar.monthrange(self.year, self.month)

            data['date'] = date(self.year, self.month, 1)
            if hasattr(self.request, 'event'):
                tz = pytz.timezone(self.request.event.settings.timezone)
            else:
                tz = pytz.UTC
            before = datetime(self.year, self.month, 1, 0, 0, 0,
                              tzinfo=tz) - timedelta(days=1)
            after = datetime(self.year, self.month, ndays, 0, 0, 0,
                             tzinfo=tz) + timedelta(days=1)

            ebd = defaultdict(list)

            if hasattr(self.request, 'event'):
                add_subevents_for_days(
                    filter_qs_by_attr(
                        self.request.event.subevents_annotated('web').filter(
                            event__sales_channels__contains=self.request.
                            sales_channel.identifier), self.request), before,
                    after, ebd, set(), self.request.event,
                    kwargs.get('cart_namespace'))
            else:
                timezones = set()
                add_events_for_days(
                    self.request,
                    filter_qs_by_attr(
                        Event.annotated(
                            self.request.organizer.events,
                            'web').filter(sales_channels__contains=self.
                                          request.sales_channel.identifier),
                        self.request), before, after, ebd, timezones)
                add_subevents_for_days(
                    filter_qs_by_attr(
                        SubEvent.annotated(
                            SubEvent.objects.filter(
                                event__organizer=self.request.organizer,
                                event__is_public=True,
                                event__live=True,
                                event__sales_channels__contains=self.request.
                                sales_channel.identifier).prefetch_related(
                                    'event___settings_objects',
                                    'event__organizer___settings_objects')),
                        self.request), before, after, ebd, timezones)

            data['weeks'] = weeks_for_template(ebd, self.year, self.month)
            for w in data['weeks']:
                for d in w:
                    if not d:
                        continue
                    d['events'] = self._serialize_events(d['events'] or [])
        elif list_type == "week":
            self._set_week_year()

            if hasattr(self.request, 'event'):
                tz = pytz.timezone(self.request.event.settings.timezone)
            else:
                tz = pytz.UTC

            week = isoweek.Week(self.year, self.week)
            data['week'] = [self.year, self.week]
            before = datetime(week.monday().year,
                              week.monday().month,
                              week.monday().day,
                              0,
                              0,
                              0,
                              tzinfo=tz) - timedelta(days=1)
            after = datetime(week.sunday().year,
                             week.sunday().month,
                             week.sunday().day,
                             0,
                             0,
                             0,
                             tzinfo=tz) + timedelta(days=1)

            ebd = defaultdict(list)
            if hasattr(self.request, 'event'):
                add_subevents_for_days(
                    filter_qs_by_attr(
                        self.request.event.subevents_annotated('web'),
                        self.request), before, after, ebd, set(),
                    self.request.event, kwargs.get('cart_namespace'))
            else:
                timezones = set()
                add_events_for_days(
                    self.request,
                    filter_qs_by_attr(
                        Event.annotated(self.request.organizer.events, 'web'),
                        self.request), before, after, ebd, timezones)
                add_subevents_for_days(
                    filter_qs_by_attr(
                        SubEvent.annotated(
                            SubEvent.objects.filter(
                                event__organizer=self.request.organizer,
                                event__is_public=True,
                                event__live=True,
                            ).prefetch_related(
                                'event___settings_objects',
                                'event__organizer___settings_objects')),
                        self.request), before, after, ebd, timezones)

            data['days'] = days_for_template(ebd, week)
            for d in data['days']:
                d['events'] = self._serialize_events(d['events'] or [])
        else:
            offset = int(self.request.GET.get("offset", 0))
            limit = 50
            if hasattr(self.request, 'event'):
                evs = self.request.event.subevents_sorted(
                    filter_qs_by_attr(
                        self.request.event.subevents_annotated(
                            self.request.sales_channel.identifier),
                        self.request))
                ordering = self.request.event.settings.get(
                    'frontpage_subevent_ordering',
                    default='date_ascending',
                    as_type=str)
                data['has_more_events'] = False
                if ordering in ("date_ascending", "date_descending"):
                    # fetch one more result than needed to check if more events exist
                    evs = list(evs[offset:offset + limit + 1])
                    if len(evs) > limit:
                        data['has_more_events'] = True
                        evs = evs[:limit]

                tz = pytz.timezone(request.event.settings.timezone)
                if self.request.event.settings.event_list_available_only:
                    evs = [
                        se for se in evs if not se.presale_has_ended and (
                            se.best_availability_state is not None
                            and se.best_availability_state >=
                            Quota.AVAILABILITY_RESERVED)
                    ]

                data['events'] = [{
                    'name':
                    str(ev.name),
                    'location':
                    str(ev.location),
                    'date_range':
                    self._get_date_range(ev, ev.event, tz),
                    'availability':
                    self._get_availability(ev, ev.event, tz=tz),
                    'event_url':
                    build_absolute_uri(ev.event, 'presale:event.index'),
                    'subevent':
                    ev.pk,
                } for ev in evs]
            else:
                data['events'] = []
                qs = self._get_event_queryset()
                for event in qs:
                    tz = pytz.timezone(
                        event.cache.get_or_set(
                            'timezone', lambda: event.settings.timezone))
                    if event.has_subevents:
                        dr = daterange(event.min_from.astimezone(tz),
                                       (event.max_fromto or event.max_to
                                        or event.max_from).astimezone(tz))
                        avail = {
                            'color': 'none',
                            'text': gettext('Event series')
                        }
                    else:
                        dr = self._get_date_range(event, event, tz)
                        avail = self._get_availability(event, event, tz=tz)
                    data['events'].append({
                        'name':
                        str(event.name),
                        'location':
                        str(event.location),
                        'date_range':
                        dr,
                        'availability':
                        avail,
                        'event_url':
                        build_absolute_uri(event, 'presale:event.index'),
                    })

        cache.set(cache_key, data, 30)
        # These pages are cached for a really short duration – this should make them pretty accurate, while still
        # providing some protection against burst traffic.
        return self.response(data)
Example #22
    def execute(self, *args, **options):
        begin, end = options['begin'], options['end']
        report = ""
        if all([begin, end]):
            begin = util.get_date(begin)
            end = util.get_date(end)
        else:
            if any([begin, end]):
                raise OrderError(
                    'You should specify a begin and an end date !',
                    'per_project')
            if options['month']:
                report = 'month'
                begin = date(options['year'], options['month'], 1)
                end = date(
                    options['year'], options['month'],
                    calendar.monthrange(options['year'], options['month'])[1])
            else:
                report = "week"
                week = isoweek.Week(options['year'], options['week'])
                begin = week.day(0)
                end = week.day(6)

        if options['team'] is not None:
            try:
                team = conf['teams'][options['team']]
            except KeyError:
                raise OrderError(
                    'Team {0} doesn\'t exist'.format(options['team']),
                    'per_project')
        else:
            team = None

        time_entries = api.get_time_entries(begin, end, team)

        per_project = {}
        for time_entry in time_entries:
            project_name = time_entry['project']['name']
            if project_name not in per_project:
                per_project[project_name] = {
                    'time': float(time_entry['hours']),
                    'id': time_entry['project']['id']
                }
            else:
                per_project[project_name]['time'] += float(time_entry['hours'])

        per_project_group = {}
        for project_name, project_data in per_project.items():
            parent = api.get_parent_project(project_data['id'], last=True)
            if parent['name'] not in per_project_group:
                per_project_group[parent['name']] = {
                    'projects': [{
                        project_name: project_data['time']
                    }],
                    'time': float(project_data['time'])
                }
            else:
                per_project_group[parent['name']]['time'] += float(
                    project_data['time'])
                per_project_group[parent['name']]['projects'].append(
                    {project_name: project_data['time']})

        template = template_env.get_template('default/per_project.temp')
        print(template.render(
            report=report,
            week=week,
            begin=begin,
            end=end,
            per_project_group_items=per_project_group.items(),
            total=sum(data['time'] for data in per_project_group.values())))

        return 0
Example #23
def crashes_per_day(request, default_context=None):
    context = default_context or {}
    context['products'] = context['active_versions'].keys()

    # This report does not currently support doing a graph by **build date**.
    # So we hardcode the choice to always be regular report.
    # The reason for not entirely deleting the functionality is because
    # we might support it later in the future.
    # The only reason people might get to this page with a
    # date_range_type=build set is if they used the old daily report
    # and clicked on the link to the new Crashes per User report.
    if request.GET.get('date_range_type') == 'build':
        params = dict(request.GET)
        params.pop('date_range_type')
        url = reverse('crashstats:crashes_per_day')
        url += '?' + urllib.urlencode(params, True)
        messages.warning(
            request, 'The Crashes per User report does not support filtering '
            'by *build* date. ')
        return redirect(url)

    platforms_api = models.Platforms()
    platforms = platforms_api.get()

    date_range_types = ['report', 'build']
    hang_types = ['any', 'crash', 'hang-p']
    form = forms.DailyFormByVersion(
        context['active_versions'],
        platforms,
        data=request.GET,
        date_range_types=date_range_types,
        hang_types=hang_types,
    )
    if not form.is_valid():
        return http.HttpResponseBadRequest(str(form.errors))

    params = form.cleaned_data
    params['product'] = params.pop('p')
    params['versions'] = sorted(list(set(params.pop('v'))), reverse=True)
    try:
        params['platforms'] = params.pop('os')
    except KeyError:
        params['platforms'] = None

    if len(params['versions']) > 0:
        context['version'] = params['versions'][0]

    context['product'] = params['product']

    if not params['versions']:
        # need to pick the default featured ones
        params['versions'] = []
        for pv in context['active_versions'][context['product']]:
            if pv['is_featured']:
                params['versions'].append(pv['version'])

    context['available_versions'] = []
    for version in context['active_versions'][params['product']]:
        context['available_versions'].append(version['version'])

    if not params.get('platforms'):
        params['platforms'] = [
            x['name'] for x in platforms if x.get('display')
        ]

    context['platforms'] = params.get('platforms')

    end_date = params.get('date_end') or datetime.datetime.utcnow()
    if isinstance(end_date, datetime.datetime):
        end_date = end_date.date()
    start_date = (params.get('date_start')
                  or end_date - datetime.timedelta(weeks=2))
    if isinstance(start_date, datetime.datetime):
        start_date = start_date.date()

    context['start_date'] = start_date.strftime('%Y-%m-%d')
    context['end_date'] = end_date.strftime('%Y-%m-%d')

    context['duration'] = abs((start_date - end_date).days)
    context['dates'] = utils.daterange(start_date, end_date)

    context['hang_type'] = params.get('hang_type') or 'any'

    context['date_range_type'] = params.get('date_range_type') or 'report'

    _date_range_type = params.pop('date_range_type')
    if _date_range_type == 'build':
        params['_histogram.build_id'] = ['version']
        params['_histogram_interval.build_id'] = 1000000
    else:
        params['_histogram.date'] = ['version']
    params['_facets'] = ['version']

    params.pop('date_end')

    params.pop('date_start')
    if _date_range_type == 'build':
        params['build_id'] = [
            '>=' + start_date.strftime('%Y%m%d000000'),
            '<' + end_date.strftime('%Y%m%d000000'),
        ]
    else:
        params['date'] = [
            '>=' + start_date.strftime('%Y-%m-%d'),
            '<' + end_date.strftime('%Y-%m-%d'),
        ]

    params['_results_number'] = 0  # because we don't care about hits
    params['_columns'] = ('date', 'version', 'platform', 'product')
    if params['hang_type'] == 'crash':
        params['hang_type'] = '0'
    elif params['hang_type'] == 'hang-p':
        params['hang_type'] = '-1'
    else:
        params.pop('hang_type')

    # supersearch expects the parameter `versions` (a list or tuple)
    # to be called `version`
    supersearch_params = copy.deepcopy(params)
    supersearch_params['version'] = supersearch_params.pop('versions')
    supersearch_params['platform'] = supersearch_params.pop('platforms')
    # in SuperSearch it's called 'Mac' not 'Mac OS X'
    if 'Mac OS X' in supersearch_params['platform']:
        supersearch_params['platform'].append('Mac')
        supersearch_params['platform'].remove('Mac OS X')

    if params['product'] == 'FennecAndroid':
        # FennecAndroid only has one platform and it's "Android"
        # so none of the options presented in the crashes_per_day.html
        # template are applicable.
        del supersearch_params['platform']

    try:
        graph_data, results, adi_by_version = _get_crashes_per_day_with_adu(
            supersearch_params, start_date, end_date, platforms,
            _date_range_type)
    except BadArgumentError as exception:
        return http.HttpResponseBadRequest(unicode(exception))

    render_csv = request.GET.get('format') == 'csv'
    data_table = {'totals': {}, 'dates': {}}
    facets = results['facets']
    has_data_versions = set()
    if _date_range_type == 'build':
        histogram = facets['histogram_build_id']
    else:
        histogram = facets['histogram_date']

    for group in histogram:
        if _date_range_type == 'build':
            date = build_id_to_date(group['term'])
        else:
            date = group['term'].split('T')[0]
        if date not in data_table['dates']:
            data_table['dates'][date] = []
        sorted_by_version = sorted(group['facets']['version'],
                                   key=itemgetter('term'),
                                   reverse=True)

        for facet_group in sorted_by_version:
            term = facet_group['term']
            has_data_versions.add(term)

            count = facet_group['count']
            adi_groups = adi_by_version[term]
            if date in adi_groups:
                total, throttle = adi_groups[date]
                if total:
                    ratio = round(100.0 * count / total / throttle, 3)
                else:
                    ratio = 0.0

                # Why do we divide the count by the throttle?!
                # Consider the case of Release. That one we throttle to 10%
                # meaning that if we received 123 crashes, it happened to
                # about 1230 people actually. We just "discarded" 90% of the
                # records.
                # But, why divide? Because throttle is a floating point
                # number between 0 and 1.0. If it's 1.0 it means we're taking
                # 100% and 123/1.0 == 123. If it's 0.1 it means that
                # 123/0.1 == 1230.
                report_count = int(count / throttle)
                item = {
                    'adi': total,
                    'date': date,
                    'ratio': ratio,
                    'report_count': report_count,
                    'product': params['product'],
                    'throttle': throttle,
                    'version': term,
                }
                # Because this code is using the `_render_daily_csv()` function
                # which is used by the old daily() view function, we have to
                # use the common (and old) names for certain keys
                if render_csv:
                    item['adu'] = item.pop('adi')
                    item['crash_hadu'] = item.pop('ratio')

                data_table['dates'][date].append(item)

    if _date_range_type == 'build':
        # for the Date Range = "Build Date" report, we only want to
        # include versions that had data.
        context['versions'] = list(has_data_versions)
    else:
        context['versions'] = params['versions']

    for date in data_table['dates']:
        data_table['dates'][date] = sorted(data_table['dates'][date],
                                           key=itemgetter('version'),
                                           reverse=True)

    if render_csv:
        return _render_daily_csv(
            request,
            data_table,
            params['product'],
            params['versions'],
            platforms,
            params['platforms'],
        )
    context['data_table'] = data_table
    context['graph_data'] = graph_data
    context['report'] = 'daily'

    errors = []
    for error in results.get('errors', []):
        if not error['type'] == 'shards':
            continue

        week = int(error['index'][-2:])
        year = int(error['index'][-6:-2])
        day = isoweek.Week(year, week).monday()
        percent = error['shards_count'] * 100 / settings.ES_SHARDS_PER_INDEX
        errors.append(
            'The data for the week of {} is ~{}% lower than expected.'.format(
                day, percent))
    context['errors'] = errors

    return render(request, 'crashstats/crashes_per_day.html', context)
Example #24
def prepare_data(files_path, preprocessed_train_path, preprocessed_test_path):
    print("Preparing data...")
    with tqdm(total=16) as pbar:
        table_names = [
            'train', 'store', 'store_states', 'state_names', 'googletrend',
            'weather', 'test'
        ]
        train, store, store_states, state_names, googletrend, weather, test = \
            [pd.read_csv(os.path.join(files_path, f'{fname}.csv'), low_memory=False) for fname in table_names]

        # Turn state Holidays to boolean
        train.StateHoliday = train.StateHoliday != '0'
        test.StateHoliday = test.StateHoliday != '0'

        # Join tables
        weather = tmerger.join_df(weather, state_names, "file", "StateName")
        pbar.update(1)

        # Replace all instances of state name 'NI' to match the usage in the rest of the data: 'HB,NI'
        googletrend['Date'] = googletrend.week.str.split(' - ', expand=True)[0]
        googletrend['State'] = googletrend.file.str.split('_', expand=True)[2]
        googletrend.loc[googletrend.State == 'NI', "State"] = 'HB,NI'
        pbar.update(1)

        # Extracts particular date fields from a complete datetime for the purpose of constructing categoricals
        edate.get_datepart(weather, "Date", drop=False, inplace=True)
        edate.get_datepart(googletrend, "Date", drop=False, inplace=True)
        edate.get_datepart(train, "Date", drop=False, inplace=True)
        edate.get_datepart(test, "Date", drop=False, inplace=True)

        edate.get_elapsed(weather, "Date", inplace=True)
        edate.get_elapsed(googletrend, "Date", inplace=True)
        edate.get_elapsed(train, "Date", inplace=True)
        edate.get_elapsed(test, "Date", inplace=True)

        # The Google trends data has a special category for the whole of Germany
        trend_de = googletrend[googletrend.file == 'Rossmann_DE']
        pbar.update(1)

        # Outer join to a single dataframe
        store = tmerger.join_df(store, store_states, "Store")
        joined = tmerger.join_df(train, store, "Store")
        joined_test = tmerger.join_df(test, store, "Store")
        joined = tmerger.join_df(joined, googletrend,
                                 ["State", "Year", "Week"])
        joined_test = tmerger.join_df(joined_test, googletrend,
                                      ["State", "Year", "Week"])
        joined = joined.merge(trend_de,
                              'left', ["Year", "Week"],
                              suffixes=('', '_DE'))
        joined_test = joined_test.merge(trend_de,
                                        'left', ["Year", "Week"],
                                        suffixes=('', '_DE'))
        joined = tmerger.join_df(joined, weather, ["State", "Date"])
        joined_test = tmerger.join_df(joined_test, weather, ["State", "Date"])
        for df in (joined, joined_test):
            for c in df.columns:
                if c.endswith('_y'):
                    if c in df.columns:
                        df.drop(c, inplace=True, axis=1)
        pbar.update(1)

        for df in (joined, joined_test):
            # Fill in missing values to avoid complications
            df['CompetitionOpenSinceYear'] = df.CompetitionOpenSinceYear.fillna(
                1900).astype(np.int32)
            df['CompetitionOpenSinceMonth'] = df.CompetitionOpenSinceMonth.fillna(
                1).astype(np.int32)
            df['Promo2SinceYear'] = df.Promo2SinceYear.fillna(1900).astype(
                np.int32)
            df['Promo2SinceWeek'] = df.Promo2SinceWeek.fillna(1).astype(
                np.int32)
            pbar.update(1)

            # Extract features "CompetitionOpenSince" and "CompetitionDaysOpen"
            df["CompetitionOpenSince"] = pd.to_datetime(
                dict(year=df.CompetitionOpenSinceYear,
                     month=df.CompetitionOpenSinceMonth,
                     day=15))
            df["CompetitionDaysOpen"] = df.Date.subtract(
                df.CompetitionOpenSince).dt.days
            pbar.update(1)

            # Replace some erroneous / outlying data
            df.loc[df.CompetitionDaysOpen < 0, "CompetitionDaysOpen"] = 0
            df.loc[df.CompetitionOpenSinceYear < 1990,
                   "CompetitionDaysOpen"] = 0
            pbar.update(1)

            # Add "CompetitionMonthsOpen" field, limiting the maximum to 2 years to limit number of unique categories.
            df["CompetitionMonthsOpen"] = df["CompetitionDaysOpen"] // 30
            df.loc[df.CompetitionMonthsOpen > 24, "CompetitionMonthsOpen"] = 24
            df["Promo2Since"] = pd.to_datetime(
                df.apply(lambda x: isoweek.Week(x.Promo2SinceYear, x.
                                                Promo2SinceWeek).monday(),
                         axis=1).astype(pd.datetime))
            df["Promo2Days"] = df.Date.subtract(df["Promo2Since"]).dt.days
            df.loc[df.Promo2Days < 0, "Promo2Days"] = 0
            df.loc[df.Promo2SinceYear < 1990, "Promo2Days"] = 0
            df["Promo2Weeks"] = df["Promo2Days"] // 7
            df.loc[df.Promo2Weeks < 0, "Promo2Weeks"] = 0
            df.loc[df.Promo2Weeks > 25, "Promo2Weeks"] = 25
            pbar.update(1)

        # Durations
        columns = ["Date", "Store", "Promo", "StateHoliday", "SchoolHoliday"]
        for name, df in zip(("train", "test"),
                            (train[columns], test[columns])):
            field = 'SchoolHoliday'
            df = df.sort_values(['Store', 'Date'])
            get_elapsed(df, field, 'After')
            field = 'StateHoliday'
            df = df.sort_values(['Store', 'Date'])
            get_elapsed(df, field, 'After')
            field = 'Promo'
            df = df.sort_values(['Store', 'Date'])
            get_elapsed(df, field, 'After')
            # Set the active index to Date
            df = df.set_index("Date")
            # Set null values from elapsed field calculations to 0
            columns = ['SchoolHoliday', 'StateHoliday', 'Promo']
            for p in columns:
                a = 'After' + p
                df[a] = df[a].fillna(0)
            # Window functions in pandas to calculate rolling quantities
            bwd = df[['Store'] +
                     columns].sort_index().groupby("Store").rolling(
                         7, min_periods=1).sum()
            fwd = df[['Store'] + columns].sort_index(
                ascending=False).groupby("Store").rolling(7,
                                                          min_periods=1).sum()
            # We want to drop the Store indices grouped together in the window function
            bwd.drop('Store', 1, inplace=True)
            bwd.reset_index(inplace=True)
            fwd.drop('Store', 1, inplace=True)
            fwd.reset_index(inplace=True)
            df.reset_index(inplace=True)
            df = df.merge(bwd, 'left', ['Date', 'Store'], suffixes=['', '_bw'])
            df = df.merge(fwd, 'left', ['Date', 'Store'], suffixes=['', '_fw'])
            df.drop(columns, 1, inplace=True)
            df["Date"] = pd.to_datetime(df.Date)

            if name == "train":
                joined = tmerger.join_df(joined, df, ['Store', 'Date'])
            elif name == "test":
                joined_test = tmerger.join_df(joined_test, df,
                                              ['Store', 'Date'])
            pbar.update(1)

        # The authors also removed all instances where the store had zero sales / was closed.
        # We speculate that this may have cost them a higher standing in the competition.
        joined = joined[joined.Sales != 0]
        joined.reset_index(inplace=True)
        joined_test.reset_index(inplace=True)
        pbar.update(1)

        # Save to feather
        joined.to_feather(preprocessed_train_path)
        joined_test.to_feather(preprocessed_test_path)
        pbar.update(1)
        print("Data saved to feather.")
        return joined, joined_test
Example #25
    def _get_event_list(self, request, **kwargs):
        data = {}
        o = getattr(request, 'event', request.organizer)
        list_type = self.request.GET.get("style", o.settings.event_list_type)
        data['list_type'] = list_type

        if hasattr(self.request,
                   'event') and data['list_type'] not in ("calendar", "week"):
            if self.request.event.subevents.filter(
                    date_from__gt=now()).count() > 50:
                if self.request.event.settings.event_list_type not in (
                        "calendar", "week"):
                    self.request.event.settings.event_list_type = "calendar"
                data['list_type'] = list_type = 'calendar'

        if hasattr(self.request, 'event'):
            data['name'] = str(request.event.name)
            data['frontpage_text'] = str(
                rich_text(request.event.settings.frontpage_text,
                          safelinks=False))

        cache_key = ':'.join([
            'widget.py',
            'eventlist',
            request.organizer.slug,
            request.event.slug if hasattr(request, 'event') else '-',
            list_type,
            request.GET.urlencode(),
            get_language(),
        ])
        cached_data = cache.get(cache_key)
        if cached_data:
            return self.response(cached_data)

        if list_type == "calendar":
            self._set_month_year()
            _, ndays = calendar.monthrange(self.year, self.month)

            data['date'] = date(self.year, self.month, 1)
            if hasattr(self.request, 'event'):
                tz = pytz.timezone(self.request.event.settings.timezone)
            else:
                tz = pytz.UTC
            before = datetime(self.year, self.month, 1, 0, 0, 0,
                              tzinfo=tz) - timedelta(days=1)
            after = datetime(self.year, self.month, ndays, 0, 0, 0,
                             tzinfo=tz) + timedelta(days=1)

            ebd = defaultdict(list)

            if hasattr(self.request, 'event'):
                add_subevents_for_days(
                    filter_qs_by_attr(
                        self.request.event.subevents_annotated('web').filter(
                            event__sales_channels__contains=self.request.
                            sales_channel.identifier), self.request), before,
                    after, ebd, set(), self.request.event,
                    kwargs.get('cart_namespace'))
            else:
                timezones = set()
                add_events_for_days(
                    self.request,
                    filter_qs_by_attr(
                        Event.annotated(
                            self.request.organizer.events,
                            'web').filter(sales_channels__contains=self.
                                          request.sales_channel.identifier),
                        self.request), before, after, ebd, timezones)
                add_subevents_for_days(
                    filter_qs_by_attr(
                        SubEvent.annotated(
                            SubEvent.objects.filter(
                                event__organizer=self.request.organizer,
                                event__is_public=True,
                                event__live=True,
                                event__sales_channels__contains=self.request.
                                sales_channel.identifier).prefetch_related(
                                    'event___settings_objects',
                                    'event__organizer___settings_objects')),
                        self.request), before, after, ebd, timezones)

            data['weeks'] = weeks_for_template(ebd, self.year, self.month)
            for w in data['weeks']:
                for d in w:
                    if not d:
                        continue
                    d['events'] = self._serialize_events(d['events'] or [])
        elif list_type == "week":
            self._set_week_year()

            if hasattr(self.request, 'event'):
                tz = pytz.timezone(self.request.event.settings.timezone)
            else:
                tz = pytz.UTC

            week = isoweek.Week(self.year, self.week)
            data['week'] = [self.year, self.week]
            before = datetime(week.monday().year,
                              week.monday().month,
                              week.monday().day,
                              0,
                              0,
                              0,
                              tzinfo=tz) - timedelta(days=1)
            after = datetime(week.sunday().year,
                             week.sunday().month,
                             week.sunday().day,
                             0,
                             0,
                             0,
                             tzinfo=tz) + timedelta(days=1)

            ebd = defaultdict(list)
            if hasattr(self.request, 'event'):
                add_subevents_for_days(
                    filter_qs_by_attr(
                        self.request.event.subevents_annotated('web'),
                        self.request), before, after, ebd, set(),
                    self.request.event, kwargs.get('cart_namespace'))
            else:
                timezones = set()
                add_events_for_days(
                    self.request,
                    filter_qs_by_attr(
                        Event.annotated(self.request.organizer.events, 'web'),
                        self.request), before, after, ebd, timezones)
                add_subevents_for_days(
                    filter_qs_by_attr(
                        SubEvent.annotated(
                            SubEvent.objects.filter(
                                event__organizer=self.request.organizer,
                                event__is_public=True,
                                event__live=True,
                            ).prefetch_related(
                                'event___settings_objects',
                                'event__organizer___settings_objects')),
                        self.request), before, after, ebd, timezones)

            data['days'] = days_for_template(ebd, week)
            for d in data['days']:
                d['events'] = self._serialize_events(d['events'] or [])
        else:
            if hasattr(self.request, 'event'):
                evs = self.request.event.subevents_sorted(
                    filter_qs_by_attr(
                        self.request.event.subevents_annotated(
                            self.request.sales_channel.identifier),
                        self.request))
                tz = pytz.timezone(self.request.event.settings.timezone)
                data['events'] = [{
                    'name': str(ev.name),
                    'location': str(ev.location),
                    'date_range': self._get_date_range(ev, ev.event, tz),
                    'availability': self._get_availability(ev, ev.event),
                    'event_url': build_absolute_uri(ev.event, 'presale:event.index'),
                    'subevent': ev.pk,
                } for ev in evs]
            else:
                data['events'] = []
                qs = self._get_event_queryset()
                for event in qs:
                    tz = pytz.timezone(
                        event.cache.get_or_set(
                            'timezone', lambda: event.settings.timezone))
                    if event.has_subevents:
                        dr = daterange(event.min_from.astimezone(tz),
                                       (event.max_fromto or event.max_to
                                        or event.max_from).astimezone(tz))
                        avail = {
                            'color': 'none',
                            'text': gettext('Event series')
                        }
                    else:
                        dr = self._get_date_range(event, event, tz)
                        avail = self._get_availability(event, event)
                    data['events'].append({
                        'name': str(event.name),
                        'location': str(event.location),
                        'date_range': dr,
                        'availability': avail,
                        'event_url': build_absolute_uri(event, 'presale:event.index'),
                    })

        cache.set(cache_key, data, 30)
        # These pages are cached for a really short duration – this should make them pretty accurate, while still
        # providing some protection against burst traffic.
        return self.response(data)
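
The helpers weeks_for_template() and days_for_template() are used above but are not part of this excerpt. As a rough illustration of how the week-based variant could be built on top of isoweek (the returned dict keys and the assumption that ebd maps dates to event lists are guesses, not the project's actual implementation):

from datetime import date


def days_for_template(ebd, week):
    # Sketch only: ebd is assumed to map datetime.date keys to lists of
    # events, as filled by add_events_for_days()/add_subevents_for_days();
    # week is an isoweek.Week instance.
    return [
        {
            'date': day,                   # the weekday itself
            'events': ebd.get(day, []),    # events collected for that day
            'today': day == date.today(),  # convenience flag for templates
        }
        for day in week.days()             # isoweek.Week.days() yields Mon..Sun
    ]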
Beispiel #26
0
    def _subevent_list_context(self):
        voucher = None
        if self.request.GET.get('voucher'):
            try:
                voucher = Voucher.objects.get(
                    code__iexact=self.request.GET.get('voucher'),
                    event=self.request.event)
            except Voucher.DoesNotExist:
                pass

        context = {}
        context['list_type'] = self.request.GET.get(
            "style", self.request.event.settings.event_list_type)
        if context['list_type'] not in (
                "calendar", "week") and self.request.event.subevents.filter(
                    date_from__gt=now()).count() > 50:
            if self.request.event.settings.event_list_type not in ("calendar",
                                                                   "week"):
                self.request.event.settings.event_list_type = "calendar"
            context['list_type'] = "calendar"

        if context['list_type'] == "calendar":
            self._set_month_year()
            tz = pytz.timezone(self.request.event.settings.timezone)
            _, ndays = calendar.monthrange(self.year, self.month)
            before = datetime(self.year, self.month, 1, 0, 0, 0,
                              tzinfo=tz) - timedelta(days=1)
            after = datetime(self.year, self.month, ndays, 0, 0, 0,
                             tzinfo=tz) + timedelta(days=1)

            context['date'] = date(self.year, self.month, 1)
            context['before'] = before
            context['after'] = after

            ebd = defaultdict(list)
            add_subevents_for_days(
                filter_qs_by_attr(
                    self.request.event.subevents_annotated(
                        self.request.sales_channel.identifier).using(
                            settings.DATABASE_REPLICA), self.request),
                before,
                after,
                ebd,
                set(),
                self.request.event,
                self.kwargs.get('cart_namespace'),
                voucher,
            )

            context['show_names'] = ebd.get(
                '_subevents_different_names', False) or sum(
                    len(i) for i in ebd.values() if isinstance(i, list)) < 2
            context['weeks'] = weeks_for_template(ebd, self.year, self.month)
            context['months'] = [date(self.year, i + 1, 1) for i in range(12)]
            context['years'] = range(now().year - 2, now().year + 3)
        elif context['list_type'] == "week":
            self._set_week_year()
            tz = pytz.timezone(self.request.event.settings.timezone)
            week = isoweek.Week(self.year, self.week)
            before = datetime(week.monday().year, week.monday().month,
                              week.monday().day, 0, 0, 0,
                              tzinfo=tz) - timedelta(days=1)
            after = datetime(week.sunday().year, week.sunday().month,
                             week.sunday().day, 0, 0, 0,
                             tzinfo=tz) + timedelta(days=1)

            context['date'] = week.monday()
            context['before'] = before
            context['after'] = after

            ebd = defaultdict(list)
            add_subevents_for_days(
                filter_qs_by_attr(
                    self.request.event.subevents_annotated(
                        self.request.sales_channel.identifier).using(
                            settings.DATABASE_REPLICA), self.request),
                before,
                after,
                ebd,
                set(),
                self.request.event,
                self.kwargs.get('cart_namespace'),
                voucher,
            )

            context['show_names'] = ebd.get(
                '_subevents_different_names', False) or sum(
                    len(i) for i in ebd.values() if isinstance(i, list)) < 2
            context['days'] = days_for_template(ebd, week)
            context['weeks'] = [
                (date_fromisocalendar(self.year, i + 1, 1),
                 date_fromisocalendar(self.year, i + 1, 7))
                for i in range(53 if date(self.year, 12, 31).isocalendar()[1] == 53 else 52)
            ]
            context['years'] = range(now().year - 2, now().year + 3)
            context['week_format'] = get_format('WEEK_FORMAT')
            if context['week_format'] == 'WEEK_FORMAT':
                context['week_format'] = WEEK_FORMAT
        else:
            context['subevent_list'] = self.request.event.subevents_sorted(
                filter_qs_by_attr(
                    self.request.event.subevents_annotated(
                        self.request.sales_channel.identifier).using(
                            settings.DATABASE_REPLICA), self.request))
            if self.request.event.settings.event_list_available_only and not voucher:
                context['subevent_list'] = [
                    se for se in context['subevent_list']
                    if not se.presale_has_ended and
                    se.best_availability_state >= Quota.AVAILABILITY_RESERVED
                ]
        return context
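
date_fromisocalendar(year, week, day) is referenced above without its definition. A minimal sketch of such a helper, assuming it is a compatibility wrapper around datetime.date.fromisocalendar (Python 3.8+) with an isoweek fallback for older interpreters:

from datetime import date

import isoweek


def date_fromisocalendar(isoyear, isoweek_nr, isoday):
    # Assumed compatibility helper, not necessarily the project's actual code.
    try:
        # Available natively on Python >= 3.8.
        return date.fromisocalendar(isoyear, isoweek_nr, isoday)
    except AttributeError:
        # isoweek.Week.day() is 0-based (0 = Monday), ISO weekdays are 1-based.
        return isoweek.Week(isoyear, isoweek_nr).day(isoday - 1)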
Beispiel #27
0
    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)

        # Show voucher option if an event is selected and vouchers exist
        vouchers_exist = self.request.event.cache.get('vouchers_exist')
        if vouchers_exist is None:
            vouchers_exist = self.request.event.vouchers.exists()
            self.request.event.cache.set('vouchers_exist', vouchers_exist)

        if not self.request.event.has_subevents or self.subevent:
            # Fetch all items
            items, display_add_to_cart = get_grouped_items(
                self.request.event,
                self.subevent,
                filter_items=self.request.GET.getlist('item'),
                filter_categories=self.request.GET.getlist('category'),
                channel=self.request.sales_channel.identifier)
            context['itemnum'] = len(items)
            context['allfree'] = all(
                item.display_price.gross == Decimal('0.00')
                for item in items if not item.has_variations) and all(
                    all(var.display_price.gross == Decimal('0.00')
                        for var in item.available_variations)
                    for item in items if item.has_variations)

            # Regroup those by category
            context['items_by_category'] = item_group_by_category(items)
            context['display_add_to_cart'] = display_add_to_cart

            context['show_vouchers'] = vouchers_exist
            context['vouchers_exist'] = vouchers_exist
        else:
            context['show_vouchers'] = False
            context['vouchers_exist'] = vouchers_exist

        context['ev'] = self.subevent or self.request.event
        context['subevent'] = self.subevent
        context['cart'] = self.get_cart()
        context['has_addon_choices'] = get_cart(
            self.request).filter(item__addons__isnull=False).exists()

        if self.subevent:
            context['frontpage_text'] = str(self.subevent.frontpage_text)
        else:
            context['frontpage_text'] = str(
                self.request.event.settings.frontpage_text)

        context['list_type'] = self.request.GET.get(
            "style", self.request.event.settings.event_list_type)
        if context['list_type'] not in (
                "calendar",
                "week") and self.request.event.subevents.count() > 100:
            if self.request.event.settings.event_list_type not in ("calendar",
                                                                   "week"):
                self.request.event.settings.event_list_type = "calendar"
            context['list_type'] = "calendar"

        if context['list_type'] == "calendar" and self.request.event.has_subevents:
            self._set_month_year()
            tz = pytz.timezone(self.request.event.settings.timezone)
            _, ndays = calendar.monthrange(self.year, self.month)
            before = datetime(self.year, self.month, 1, 0, 0, 0,
                              tzinfo=tz) - timedelta(days=1)
            after = datetime(self.year, self.month, ndays, 0, 0, 0,
                             tzinfo=tz) + timedelta(days=1)

            context['date'] = date(self.year, self.month, 1)
            context['before'] = before
            context['after'] = after

            ebd = defaultdict(list)
            add_subevents_for_days(
                filter_qs_by_attr(
                    self.request.event.subevents_annotated(
                        self.request.sales_channel.identifier).using(
                            settings.DATABASE_REPLICA), self.request), before,
                after, ebd, set(), self.request.event,
                kwargs.get('cart_namespace'))

            context['show_names'] = ebd.get(
                '_subevents_different_names', False) or sum(
                    len(i) for i in ebd.values() if isinstance(i, list)) < 2
            context['weeks'] = weeks_for_template(ebd, self.year, self.month)
            context['months'] = [date(self.year, i + 1, 1) for i in range(12)]
            context['years'] = range(now().year - 2, now().year + 3)
        elif context['list_type'] == "week" and self.request.event.has_subevents:
            self._set_week_year()
            tz = pytz.timezone(self.request.event.settings.timezone)
            week = isoweek.Week(self.year, self.week)
            before = datetime(week.monday().year, week.monday().month,
                              week.monday().day, 0, 0, 0,
                              tzinfo=tz) - timedelta(days=1)
            after = datetime(week.sunday().year, week.sunday().month,
                             week.sunday().day, 0, 0, 0,
                             tzinfo=tz) + timedelta(days=1)

            context['date'] = week.monday()
            context['before'] = before
            context['after'] = after

            ebd = defaultdict(list)
            add_subevents_for_days(
                filter_qs_by_attr(
                    self.request.event.subevents_annotated(
                        self.request.sales_channel.identifier).using(
                            settings.DATABASE_REPLICA), self.request), before,
                after, ebd, set(), self.request.event,
                kwargs.get('cart_namespace'))

            context['show_names'] = ebd.get(
                '_subevents_different_names', False) or sum(
                    len(i) for i in ebd.values() if isinstance(i, list)) < 2
            context['days'] = days_for_template(ebd, week)
            context['weeks'] = [i + 1 for i in range(53)]
            context['years'] = range(now().year - 2, now().year + 3)
            context['week_format'] = get_format('WEEK_FORMAT')
            if context['week_format'] == 'WEEK_FORMAT':
                context['week_format'] = WEEK_FORMAT
        elif self.request.event.has_subevents:
            context['subevent_list'] = self.request.event.subevents_sorted(
                filter_qs_by_attr(
                    self.request.event.subevents_annotated(
                        self.request.sales_channel.identifier).using(
                            settings.DATABASE_REPLICA), self.request))

        context['show_cart'] = (context['cart']['positions']
                                and (self.request.event.has_subevents
                                     or self.request.event.presale_is_running))
        if self.request.event.settings.redirect_to_checkout_directly:
            context['cart_redirect'] = eventreverse(
                self.request.event,
                'presale:event.checkout.start',
                kwargs={'cart_namespace': kwargs.get('cart_namespace') or ''})
            if context['cart_redirect'].startswith('https:'):
                context['cart_redirect'] = '/' + context['cart_redirect'].split('/', 3)[3]
        else:
            context['cart_redirect'] = self.request.path

        return context
Beispiel #28
0
    ax_tp.tick_params(axis="x", rotation=45)

    # ax_p.set_xticks(np.arange(1,54))
    # ax_p.set_xticklabels(tticks_l_year)

    ax_p.set_title("campylob." if disease == "campylobacter" else disease,
                   fontsize=22)
    ax_tp.set_xlabel("time [years]", fontsize=22)

    if i == 0:
        ax_p.set_ylabel("periodic\ncontribution", fontsize=22)
        ax_t.set_ylabel("trend\ncontribution", fontsize=22)
        ax_tp.set_ylabel("combined\ncontribution", fontsize=22)
    #elif i==2:
    ax_t.set_xlim(
        (isoweek.Week(2013, 1).wednesday(), isoweek.Week(2016, 1).wednesday()))
    ax_t.set_xticks(
        [isoweek.Week(i, 1).wednesday() for i in range(2013, 2017)])
    ax_t.set_xticklabels(range(2013, 2017))

    ax_t.tick_params(labelbottom=False, labelleft=True, labelsize=18, length=6)
    ax_p.tick_params(labelbottom=False, labelleft=True, labelsize=18, length=6)
    ax_tp.tick_params(labelbottom=True, labelleft=True, labelsize=18, length=6)

    fig.text(0,
             1 + 0.025,
             r"$\textbf{" + str(i + 1) + r"A}$",
             fontsize=22,
             transform=ax_p.transAxes)
    fig.text(0,
             1 + 0.025,
Beispiel #29
0
def _parse_yearweek(yearweek):
    """Utility function to convert internal string representations of calender weeks into datetime objects. Uses strings of format `<year>-KW<week>`. Weeks are 1-based."""
    year, week = yearweek_regex.search(yearweek).groups()
    # datetime.combine(isoweek.Week(int(year), int(week)).wednesday(),time(0))
    return isoweek.Week(int(year), int(week))
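
yearweek_regex is defined elsewhere in the module and not shown here. A plausible pattern for the <year>-KW<week> format mentioned in the docstring, together with a usage example (both the pattern and the sample string are assumptions based on that format):

import re

import isoweek

# Assumed pattern: matches strings such as "2016-KW07" or "2016-KW7".
yearweek_regex = re.compile(r"(\d{4})-KW(\d{1,2})")

year, week = yearweek_regex.search("2016-KW07").groups()
print(isoweek.Week(int(year), int(week)))  # prints the ISO representation, 2016W07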
Beispiel #30
0
    def get(self, userid):
        """return statuses for the range apecified by start and end query
        parameters. If both are not present at the same time, last
        present week is used. If there is no data at all, current week
        is returned.
        """
        start = self.request.get('start')
        end = self.request.get('end')
        flex = None
        if (start and end):
            start = datetime.datetime.strptime(start, '%Y-%m-%d').date()
            end = datetime.datetime.strptime(end, '%Y-%m-%d').date()
        else:
            reports = model.report.getList(userid, 0, 1)
            if len(reports) > 0:
                flex = reports[0]['flex']
                nextWeek = isoweek.Week(reports[0]['year'],
                                        reports[0]['week']) + 1
                start = nextWeek.monday()
                end = nextWeek.sunday()
        if not (start and end):
            dt = datetime.date.today()
            start = dt - datetime.timedelta(days=dt.weekday())
            end = start + datetime.timedelta(days=6)
        days = model.day.getList(userid, start, end)
        # create frontend-friendly array
        retlist = []
        while start <= end:
            if len(days) and days[0]['date'] < start:
                # Drop stale entries that precede the current date and re-check
                days.pop(0)
                continue
            if len(days) and days[0]['date'] == start:
                retlist.append(days.pop(0))
            else:
                # Append default values
                if start.weekday() in WORKDAYS:
                    retlist.append({
                        'date': start,
                        'arrival': '8:00',
                        'break': 15,
                        'departure': '16:15',
                        'extra': 0,
                        'type': 'work'
                    })
                else:
                    retlist.append({
                        'date': start,
                        'arrival': '',
                        'break': None,
                        'departure': '',
                        'extra': None,
                        'type': 'off'
                    })

            start += datetime.timedelta(days=1)
        self.response.content_type = 'application/json'
        self.response.write(
            json.dumps({
                'dl': retlist,
                'flex': flex
            }, cls=DateEncoder))
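
The DateEncoder passed to json.dumps() is not included in this excerpt. A minimal sketch of what such an encoder might look like, assuming it only needs to render date/datetime values as ISO 8601 strings (the class name matches the reference above; the body is an assumption):

import datetime
import json


class DateEncoder(json.JSONEncoder):
    # Assumed implementation: serialize date/datetime values as ISO strings,
    # defer everything else to the default JSON handling.
    def default(self, o):
        if isinstance(o, (datetime.date, datetime.datetime)):
            return o.isoformat()
        return super().default(o)

Used as in the handler above: json.dumps({'dl': retlist, 'flex': flex}, cls=DateEncoder).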