Example #1
def test_approve(
    create_user, create_reservation, create_blocking, smtp, notify_blocker, colliding_reservation, colliding_occurrence
):
    blocking = create_blocking(start_date=date.today(), end_date=date.today() + timedelta(days=1))
    br = blocking.blocked_rooms[0]
    other_user = create_user(123)
    resv = create_reservation(
        start_dt=datetime.combine(blocking.start_date, time(8)),
        end_dt=datetime.combine(blocking.start_date, time(10)),
        created_by_user=other_user if colliding_reservation else blocking.created_by_user,
        booked_for_user=other_user if colliding_reservation else blocking.created_by_user,
    )
    resv2 = create_reservation(
        start_dt=datetime.combine(blocking.start_date + timedelta(days=1), time(8)),
        end_dt=datetime.combine(blocking.end_date + timedelta(days=1), time(10)),
        repeat_frequency=RepeatFrequency.DAY,
        created_by_user=other_user if colliding_occurrence else blocking.created_by_user,
        booked_for_user=other_user if colliding_occurrence else blocking.created_by_user,
    )
    assert br.state == BlockedRoom.State.pending
    br.approve(notify_blocker=notify_blocker)
    assert br.state == BlockedRoom.State.accepted
    assert resv.is_rejected == colliding_reservation
    assert not resv2.is_rejected
    for occ in resv2.occurrences:
        assert occ.is_rejected == (colliding_occurrence and blocking.is_active_at(occ.date))
    if notify_blocker:
        extract_emails(smtp, one=True, to=blocking.created_by_user.email, subject="Room blocking ACCEPTED")
    assert len(smtp.outbox) == 2 * (colliding_occurrence + colliding_reservation)  # 2 emails per rejection
Example #2
    def testReservationSpecificBlockingMethods(self):
        block = self._createTestBlocking()
        candResv = Location.getDefaultLocation().factory.newReservation()
        candResv.startDT = datetime.combine(block.startDate - timedelta(days=1), time())
        candResv.endDT = datetime.combine(block.endDate + timedelta(days=1), time(23, 59))
        candResv.repeatability = RepeatabilityEnum.daily
        candResv.isConfirmed = None

        candResv.room = self._room7
        self.assertTrue(candResv.getBlockingConflictState() is None) # No blocking
        self.assertEqual(candResv.getBlockedDates(), [])
        br = self._blockRoom(block, self._room7, False)
        block.update()
        self.assertEqual(candResv.getBlockingConflictState(), 'pending') # Pending blocking
        self.assertEqual(candResv.getBlockedDates(), [])
        br.approve(sendNotification=False)
        self.assertEqual(candResv.getBlockingConflictState(), 'active') # Active blocking
        blockingDays = frozenset((date(2010, 12, 31), date(2011, 1, 1)))
        self.assertEqual(frozenset(candResv.getBlockedDates()), blockingDays)
        # Test with various users set. This basically tests if all people who are allowed to override can actually override
        self.assertTrue(candResv.getBlockingConflictState(self._avatar1) is None) # room owner
        self.assertEqual(candResv.getBlockedDates(self._avatar1), [])
        self.assertTrue(candResv.getBlockingConflictState(self._avatar2) is None) # blocking owner
        self.assertEqual(candResv.getBlockedDates(self._avatar2), [])
        self.assertEqual(candResv.getBlockingConflictState(self._avatar3), 'active') # not permitted to override
        self.assertEqual(frozenset(candResv.getBlockedDates(self._avatar3)), blockingDays)
        self.assertTrue(candResv.getBlockingConflictState(self._avatar4) is None) # on blocking ACL
        self.assertEqual(candResv.getBlockedDates(self._avatar4), [])
        # Rejecting an existing blocking is not possible via the UI, but we can test it anyway
        br.reject(sendNotification=False)
        self.assertTrue(candResv.getBlockingConflictState() is None) # No blocking
        self.assertEqual(candResv.getBlockedDates(), [])
Example #3
    def check_event_sequence(self, when, now=None, interval=timedelta(minutes=15)):
        """
        Checks if all the measures have been saved in *when* date.
        If *now* is given or if *
        :returns (ok, missing) If ok is True, no measures are lost, if False, missing is
            a list
        """
        if isinstance(when, datetime):
            when = when.date()
        start = datetime.combine(when, time(0, 0, 0))
        if now:
            end = now
        else:
            end = datetime.combine(when, time.max)  # end of the day (23:59:59.999999)

        qs = self.energy_set.filter(timestamp__gte=start, timestamp__lte=end)
        qs = qs.order_by('timestamp')

        measured = set(map(lambda x: x.time(), qs.values_list('timestamp', flat=True)))
        expected = set()
        t0 = start + interval
        while t0 <= end:
            expected.add(t0.time())
            t0 += interval
        diff = expected.difference(measured)
        return self.EnergyMesasuresReport(qs.count(), diff, not diff)
Example #4
    def transition_create_(self):
        pool = Pool()
        Appointment = pool.get('gnuhealth.appointment')
        Company = pool.get('company.company')

        timezone = None
        company_id = Transaction().context.get('company')
        if company_id:
            company = Company(company_id)
            if company.timezone:
                timezone = pytz.timezone(company.timezone)
            else:
                self.raise_user_error('no_company_timezone')

        appointments = []

        # Validate dates before computing the day count
        if self.start.date_start and self.start.date_end:
            if self.start.date_end < self.start.date_start:
                self.raise_user_error('end_before_start')

        # Iterate over days
        day_count = (self.start.date_end - self.start.date_start).days + 1
        if day_count > 31:
            self.raise_user_error('period_too_long')

        for single_date in (self.start.date_start + timedelta(n)
                for n in range(day_count)):
            if ((single_date.weekday() == 0 and self.start.monday)
            or (single_date.weekday() == 1 and self.start.tuesday)
            or (single_date.weekday() == 2 and self.start.wednesday)
            or (single_date.weekday() == 3 and self.start.thursday)
            or (single_date.weekday() == 4 and self.start.friday)
            or (single_date.weekday() == 5 and self.start.saturday)
            or (single_date.weekday() == 6 and self.start.sunday)):
                # Iterate over time
                dt = datetime.combine(
                    single_date, self.start.time_start)
                dt = timezone.localize(dt)
                dt = dt.astimezone(pytz.utc) 
                dt_end = datetime.combine(
                    single_date, self.start.time_end)
                dt_end = timezone.localize(dt_end)
                dt_end = dt_end.astimezone(pytz.utc) 
                while dt < dt_end:
                    appointment = {
                        'healthprof': self.start.healthprof.id,
                        'speciality': self.start.specialty.id,
                        'institution': self.start.institution.id,
                        'appointment_date': dt,
                        'appointment_date_end': dt +
                            timedelta(minutes=self.start.appointment_minutes),
                        'state': 'free',
                        }
                    appointments.append(appointment)
                    dt += timedelta(minutes=self.start.appointment_minutes)
        if appointments:
            Appointment.create(appointments)
        return 'open_'
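A note on the pytz pattern used above: a naive datetime must be bound to the company zone with localize() and only then converted to UTC; constructing datetime(..., tzinfo=tz) directly does not work reliably with pytz zones. A minimal standalone sketch (the zone name is illustrative):

import pytz
from datetime import datetime

tz = pytz.timezone('America/Lima')       # stands in for company.timezone
naive = datetime(2024, 6, 3, 9, 0)       # wall-clock appointment start
local = tz.localize(naive)               # attach the zone, DST-aware
utc_dt = local.astimezone(pytz.utc)      # normalized for storage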
Example #5
    def test_seconds_to_expiry(self):
        "Test that it handles naive and tz-aware times"

        with self.settings(USE_TZ=False):
            at = AccessToken(expires_on=TOMORROW)
            expires_at = datetime.combine(at.expires_on, time.min)
            self.assertTrue(is_naive(expires_at))
            self.assertTrue(is_naive(now()))
            self.assertEqual(
                at.seconds_to_expiry,
                int((expires_at - now()).total_seconds())
            )

        with self.settings(USE_TZ=True):
            at = AccessToken(expires_on=TOMORROW)
            expires_at = make_aware(
                datetime.combine(at.expires_on, time.min),
                get_current_timezone()
            )
            self.assertTrue(is_aware(expires_at))
            self.assertTrue(is_aware(now()))
            self.assertEqual(
                at.seconds_to_expiry,
                int((expires_at - now()).total_seconds())
            )
Example #6
    def form_valid(self, form):
        '''
        Create slots and return success message.
        '''
        startDate = form.cleaned_data['startDate']
        endDate = form.cleaned_data['endDate']
        startTime = form.cleaned_data['startTime']
        endTime = form.cleaned_data['endTime']
        instructor = form.cleaned_data['instructorId']

        interval_minutes = getConstant('privateLessons__lessonLengthInterval')

        this_date = startDate
        while this_date <= endDate:
            this_time = startTime
            while this_time < endTime:
                InstructorAvailabilitySlot.objects.create(
                    instructor=instructor,
                    startTime=ensure_localtime(datetime.combine(this_date, this_time)),
                    duration=interval_minutes,
                    location=form.cleaned_data.get('location'),
                    room=form.cleaned_data.get('room'),
                    pricingTier=form.cleaned_data.get('pricingTier'),
                )
                this_time = (ensure_localtime(datetime.combine(this_date, this_time)) + timedelta(minutes=interval_minutes)).time()
            this_date += timedelta(days=1)

        return JsonResponse({'valid': True})
Example #7
def gen_ical(courses):
    cal = Calendar()
    cal["version"] = "2.0"
    # prodid is a mandatory element; it can be changed freely, see RFC 5545
    cal["prodid"] = "-//Zhejiang University//LIU Dongyuan//ZH"

    for course in courses:
        for lesson in course["lessons"]:
            weeks = lesson["weeks"]
            for recur in weeks:
                event = Event()
                event.add("summary", unify_brackets(course["name"]))
                offset_days = lesson["day"] - 1 + 7 * (int(recur) - 1)
                offset = timedelta(days=offset_days)
                classdate = week_start + offset
                start = lesson_time[lesson["start"]]["start"]
                end = lesson_time[lesson["end"]]["end"]
                event.add("dtstart", datetime.combine(classdate, start))
                event.add("dtend", datetime.combine(classdate, end))
                event.add("location", lesson["location"])
                event.add("description", u"教师:" + course["teacher"])
                event["uid"] = str(uuid1()) + "@ZJU"
                cal.add_component(event)
    return cal.to_ical()
Example #8
    def get_worked_day_lines(self, contracts, date_from, date_to):
        """
        @param contract: Browse record of contracts
        @return: returns a list of dict containing the worked days that should be applied for the given contract between date_from and date_to
        """
        res = []
        # fill only if the contract as a working schedule linked
        for contract in contracts.filtered(lambda contract: contract.resource_calendar_id):
            day_from = datetime.combine(fields.Date.from_string(date_from), time.min)
            day_to = datetime.combine(fields.Date.from_string(date_to), time.max)

            calendar = contract.resource_calendar_id

            benefit_types = self.env['hr.benefit.type'].search([('code', '!=', False)])
            for benefit_type in benefit_types:
                hours = contract.employee_id.get_benefit_days_data(benefit_type, day_from, day_to)['hours']
                if hours:
                    line = {
                        'name': benefit_type.name,
                        'sequence': benefit_type.sequence,
                        'code': benefit_type.code,
                        'number_of_days': hours / calendar.hours_per_day, # n_days returned by benefit_days_data doesn't make sense for extra work
                        'number_of_hours': hours,
                    }
                    res.append(line)

        return res
Example #9
    def converts_a_dict_to_an_forecast_object_with_stop_routes_and_vehicles(self):
        forecast_dict = json.loads(test_fixtures.FORECAST_FOR_ROUTE_AND_STOP.decode('latin1'))
        today = date.today()

        forecast = ForecastWithStop.from_dict(forecast_dict)

        self.assertEqual(forecast.time, datetime.combine(today, time(hour=23, minute=9)))

        self.assertEqual(forecast.stop.code, 4200953)
        self.assertEqual(forecast.stop.name, 'PARADA ROBERTO SELMI DEI B/C')
        self.assertEqual(forecast.stop.latitude, -23.675901)
        self.assertEqual(forecast.stop.longitude, -46.752812)

        self.assertEqual(forecast.stop.routes[0].sign, '7021-10')
        self.assertEqual(forecast.stop.routes[0].code, 1989)
        self.assertEqual(forecast.stop.routes[0].direction, 1)
        self.assertEqual(forecast.stop.routes[0].main_to_sec, u'TERM. JOÃO DIAS')
        self.assertEqual(forecast.stop.routes[0].sec_to_main, u'JD. MARACÁ')
        self.assertEqual(forecast.stop.routes[0].quantity, 1)

        self.assertEqual(forecast.stop.routes[0].vehicles[0].prefix, '74558')
        self.assertEqual(forecast.stop.routes[0].vehicles[0].arriving_at, datetime.combine(today, time(hour=23, minute=11)))
        self.assertEqual(forecast.stop.routes[0].vehicles[0].accessible, True)
        self.assertEqual(forecast.stop.routes[0].vehicles[0].latitude, -23.67603)
        self.assertEqual(forecast.stop.routes[0].vehicles[0].longitude, -46.75891166666667)
Example #10
 def look_human(self):
     syslog.syslog('Looking human for %s' % self.nickname)
     # build a streamer of a sample of tweets
     self.build_streamer()
     ## schedule each bot to tweet a random tweet pulled from the corpus at a random
     ## time within the scheduled window, depending on whether it's a weekday or not
     # get today's date
     today = date.today()
     # get whether it's a weekday or weekend
     week_type = self.get_weektime(today.weekday())
     # get minimum datetime and maximum datetime to spawn intervals in between them
     mintime = time(self.scheduled_tweets[week_type]['times'][0], 0)
     mindt = datetime.combine(today, mintime)
     maxtime = time(self.scheduled_tweets[week_type]['times'][1], 0)
     maxdt = datetime.combine(today, maxtime)
     # get each bot, and use gevent to spawn_later tasks based on the week_type with a random tweet
     for bot in self.bot_list:
        intervals = [ self.randtime(mindt, maxdt) for x in xrange(self.scheduled_tweets[week_type]['num_tweets']) ]
        s = ' '.join([ str(datetime.fromtimestamp(interval)) for interval in intervals ])
        syslog.syslog('%s times to tweet -> %s' % (bot.name, s))
        bot.last_intervals = intervals
        # assign the gevent to spawn_later by mapping each interval generated, find the time delta to determine number of seconds until event
        # and then pull a random tweet from the corpus
        map(lambda time: gevent.spawn_later(time - int(datetime.now().strftime('%s')), bot.tweet, self.get_random_tweet()), intervals)
     # reset corpus
     self.twitter_corpus = []
Example #11
 def timediff(self, stime, etime):
     stime = stime.split(":")
     stime = time(int(stime[0]), int(stime[1]), int(stime[2]))
     etime = etime.split(":")
     etime = time(int(etime[0]), int(etime[1]), int(etime[2]))   
     tdiff = datetime.combine(self._date, etime) - datetime.combine(self._date, stime)
     return str(tdiff)
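The split-and-cast parsing in timediff can also be written with strptime; a hedged sketch of an equivalent helper (the function and parameter names are illustrative):

from datetime import datetime, date

def timediff(day, stime, etime, fmt='%H:%M:%S'):
    start = datetime.combine(day, datetime.strptime(stime, fmt).time())
    end = datetime.combine(day, datetime.strptime(etime, fmt).time())
    return str(end - start)

print(timediff(date(2024, 1, 1), '09:15:00', '17:45:30'))  # 8:30:30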
Example #12
def main(dt=datetime(2015, 4, 24), symbols={'ESM5', 'NQM5'}, sleep_max_sec=60, verbose=0):
    delay = random.randint(0, sleep_max_sec)
    print "%s processing %s delay %d secs" % (datetime.now().strftime('%H:%M:%S'), dt.strftime('%Y%m%d'), delay)
    tm.sleep(delay)  # sleep for the delay that was just printed

    gen = snapshot_generator()

    gen.instruments = symbols
    gen.date = dt

    start_snaps  = time(8, 0, 0)
    stop_snaps = time(16, 0, 0)
    gen.intervals = [ timedelta(milliseconds=100),  timedelta(seconds=1), timedelta(seconds=5) ]
    gen.interval_names = ['100ms', '1s', '5s']  # a list keeps the order aligned with gen.intervals
    gen.halflives      = [ timedelta(milliseconds=200), timedelta(seconds=2), timedelta(seconds=10) ]
    gen.halflive_names  = ['200ms','2s', '10s']
    gen.verbose = verbose
    
    bt = pybt.pybt()
    bt.handler = gen
    bt.date = bt.handler.date
    bt.verbose = verbose
    for ins in gen.instruments: 
        bt.symbols.append(ins)
    bt.start_time = datetime.combine( bt.date, time(7, 0, 0)) 
    bt.end_time   = datetime.combine( bt.date, time(16, 10, 0))
    
    for interval in gen.intervals:
        bt.add_timer( interval, datetime.combine(bt.date,start_snaps), datetime.combine(bt.date, stop_snaps))    

    bt.run()
Example #13
    def iter_job_adverts(self):
        re_id = re.compile('http://offre-emploi.monster.fr/(.*?).aspx', re.DOTALL)
        trs = self.document.getroot().xpath("//table[@class='listingsTable']/tbody/tr")
        for tr in trs:
            if 'class' in tr.attrib and tr.attrib['class'] != 'aceHidden':
                a = self.parser.select(tr, 'td/div/div[@class="jobTitleContainer"]/a', 1, method='xpath')
                _id = u'%s' % re_id.search(a.attrib['href']).group(1)
                advert = MonsterJobAdvert(_id)
                advert.society_name = u'%s' % self.parser.select(tr, 'td/div/div[@class="companyContainer"]/div/a',
                                                                 1, method='xpath').attrib['title']
                advert.title = u'%s' % a.text

                date = self.parser.select(tr, 'td/div/div[@class="fnt20"]', 1, method='xpath').text_content().strip()
                now = datetime.now()
                number = re.search("\d+", date)
                if number:
                    if 'heure' in date:  # matches both "heure" and "heures"
                        date = now - timedelta(hours=int(number.group(0)))
                        advert.publication_date = datetime.combine(date, time())
                    elif 'jour' in date:
                        date = now - timedelta(days=int(number.group(0)))
                        advert.publication_date = datetime.combine(date, time())
                else:
                    advert.publication_date = datetime.combine(now, time.min)

                place = self.parser.select(tr, 'td/div/div[@class="jobLocationSingleLine"]/a', method='xpath')
                if len(place) != 0:
                    advert.place = u'%s' % place[0].attrib['title']

                yield advert
Example #14
def postplan_update_view(request,*args,**kwargs):
    year=kwargs.get('year')
    month=kwargs.get('month')
    day=kwargs.get('day')
    slug=kwargs.get('slug')
    post=Post.objects(slug=slug).first()
    if request.method=="POST":
        form=PlanForm(request.POST)
        if form.is_valid():
            ending=form.cleaned_data['ending']
            starting=form.cleaned_data['starting']
            title=form.cleaned_data['title']
            transport=form.cleaned_data['transport']
            cost=form.cleaned_data['cost']
            content=form.cleaned_data['content']
            plan=PostPlan(title=title,transport=transport,cost=cost,content=content)
            plan.save()
            plan.starting=datetime.combine(date(year=year,month=month,day=day),starting)
            plan.ending=datetime.combine(date(year=year,month=month,day=day),ending)
            plan.save()
            post.postplans.append(plan.number)
            post.save()
            return HttpResponseRedirect(reverse('plan-create',kwargs={'slug':slug,'year':year,'month':month,'day':day}))
        # invalid form: re-display it with its validation errors instead of falling through
        return render(request,'plans/plan_update.html',{'form':form})
    else:
        plan=PostPlan.objects(number=kwargs.get('pk')).first()
        form=PlanForm()
        return render(request,'plans/plan_update.html',{'plan':plan,'form':form})
Example #15
def compute_fifa_wc_8th(event):
    if (datetime.date.today()-event.start_date).days<15:
        for match in event.matchs.filter(type__identifier='MATCH_EIGHTH'):
            event.matchs.remove(match)
            match.delete()
        event.save()    
        
        event_meta = get_event_meta(event)
        
        for index in range(0,4):
            effective_match = Match()
            first = event_meta['groups'][event_meta['groups_list'][index * 2]][0]
            second = event_meta['groups'][event_meta['groups_list'][(index * 2) + 1]][1]
            effective_match.name = first['name'] + ' vs ' + second['name'] + ' [1' + event_meta['groups_list'][index * 2] + ' - 2' + event_meta['groups_list'][(index * 2) + 1] + ']'
            effective_match.type = Attributes.objects.get(identifier='MATCH_EIGHTH', type='match_type')
            effective_match.when = dt.combine(dates.AddDay(event.start_date,16 + index) , dt.min.time())
            effective_match.first = Participant.objects.get(id=first['id'])
            effective_match.second = Participant.objects.get(id=second['id'])
            effective_match.save()
            event.matchs.add(effective_match)
            event.save()
            effective_match = Match()
            first = event_meta['groups'][event_meta['groups_list'][(index * 2) + 1]][0]
            second = event_meta['groups'][event_meta['groups_list'][index * 2]][1]
            effective_match.name = first['name'] + ' vs ' + second['name'] + ' [1' + event_meta['groups_list'][(index * 2) + 1] + ' - 2' + event_meta['groups_list'][index * 2] + ']'
            effective_match.type = Attributes.objects.get(identifier='MATCH_EIGHTH', type='match_type')
            effective_match.when = dt.combine(dates.AddDay(event.start_date,16 + index) , dt.min.time())
            effective_match.first = Participant.objects.get(id=first['id'])
            effective_match.second = Participant.objects.get(id=second['id'])
            effective_match.save()
            event.matchs.add(effective_match)
            event.save()
        complete_meta_for_type(event,'MATCH_EIGHTH')
Example #16
def extend_voting_period():
    """Extend voting period by EXTEND_VOTING_PERIOD if there is no
    majority decision.

    """

    # avoid circular dependencies
    from remo.voting.models import Poll

    tomorrow = get_date(days=1)
    review_count = User.objects.filter(groups__name='Review').count()

    query_start = make_aware(datetime.combine(tomorrow, datetime.min.time()), pytz.UTC)
    query_end = make_aware(datetime.combine(tomorrow, datetime.max.time()), pytz.UTC)
    polls = Poll.objects.filter(end__range=[query_start, query_end])

    for poll in polls:
        if not poll.is_extended:
            budget_poll = poll.radio_polls.get(question='Budget Approval')
            majority = reduce(or_, map(lambda x: x.votes > review_count / 2,
                                       budget_poll.answers.all()))
            if not majority:
                poll.end += timedelta(seconds=EXTEND_VOTING_PERIOD)
                poll.save()
                subject = '[Urgent] Voting extended for {0}'.format(poll.name)
                recipients = (User.objects.filter(groups=poll.valid_groups)
                              .exclude(pk__in=poll.users_voted.all())
                              .values_list('id', flat=True))
                ctx_data = {'poll': poll}
                template = 'emails/voting_vote_reminder.jinja'
                send_remo_mail.delay(subject=subject,
                                     recipients_list=recipients,
                                     email_template=template,
                                     data=ctx_data)
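The reduce(or_, map(...)) line above asks whether at least one answer has a majority; the same check reads more directly with any(), which also returns False on an empty answer set where reduce would raise a TypeError. A sketch, assuming the same budget_poll and review_count as above:

majority = any(
    answer.votes > review_count / 2
    for answer in budget_poll.answers.all()
)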
Example #17
def timeslot_offset_options(
    interval=swingtime_settings.TIMESLOT_INTERVAL,
    start_time=swingtime_settings.TIMESLOT_START_TIME,
    end_delta=swingtime_settings.TIMESLOT_END_TIME_DURATION,
    fmt=swingtime_settings.TIMESLOT_TIME_FORMAT
):
    '''
    Create a list of time slot options for use in swingtime forms.

    The list consists of 2-tuples containing the number of seconds since the
    start of the day and a 12-hour temporal representation of that offset.
    '''
    dt = datetime.combine(date.today(), time(0))
    dtstart = datetime.combine(dt.date(), start_time)
    dtend = dtstart + end_delta
    options = []

    delta = utils.time_delta_total_seconds(dtstart - dt)
    seconds = utils.time_delta_total_seconds(interval)
    while dtstart <= dtend:
        options.append((delta, dtstart.strftime(fmt)))
        dtstart += interval
        delta += seconds

    return options
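For illustration, a self-contained run of the same loop with assumed settings (a 30-minute interval starting at 09:00 for two hours; these are not the real swingtime defaults):

from datetime import date, datetime, time, timedelta

interval = timedelta(minutes=30)           # assumed TIMESLOT_INTERVAL
dt = datetime.combine(date.today(), time(0))
dtstart = datetime.combine(dt.date(), time(9, 0))
dtend = dtstart + timedelta(hours=2)

options, delta = [], int((dtstart - dt).total_seconds())
while dtstart <= dtend:
    options.append((delta, dtstart.strftime('%I:%M %p')))
    dtstart += interval
    delta += int(interval.total_seconds())

print(options[:2])  # [(32400, '09:00 AM'), (34200, '09:30 AM')]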
Example #18
    def stats(self, request, pk=None):
        if not request.user.is_superuser:
            return Response({'msg': "You don't have permission to see this data." }, status=status.HTTP_400_BAD_REQUEST)

        res = {}
        res['total_signup'] = User.objects.count()
        res['total_word'] = Word.objects.count()

        today = datetime.now().date()
        tomorrow = today + timedelta(1)
        today_start = datetime.combine(today, time())
        today_end = datetime.combine(tomorrow, time())

        res['today_signup'] = User.objects.filter(date_joined__gte=today_start, date_joined__lt=today_end).count()  # __lt keeps midnight tomorrow out of today's count
        # res['today_login'] = User.objects.filter(last_login__gte=today_start, last_login__lte=today_end).count()
        today = datetime.now().strftime('%Y-%m-%d')

        r = redis.StrictRedis()
        api_access = {}
        _total = 0
        for k in r.keys('%s::*' % today):
            _url = k[12:]
            _val = r.get(k)
            if "POST /api/api-token-auth" in _url:
                 _total += int(_val)
            else:
                api_access[_url] = _val

        res['api_access'] = api_access
        res['today_login'] = _total
        
        return Response([res])
Example #19
    def _make_form(self):
        start_date = None
        force_today = False
        if 'start_date' in request.args:
            force_today = True
            try:
                start_date = datetime.strptime(request.args['start_date'], '%Y-%m-%d').date()
            except ValueError:
                pass

        self.past_date = start_date is not None and start_date < date.today()
        if start_date is None or start_date <= date.today():
            start_dt, end_dt, self.date_changed = get_default_booking_interval(
                duration=self.DEFAULT_BOOKING_DURATION,
                precision=self.DEFAULT_START_TIME_PRECISION,
                force_today=force_today
            )
            self.date_changed = self.date_changed and not self.past_date
        else:
            start_dt = datetime.combine(start_date, Location.working_time_start)
            end_dt = datetime.combine(start_date, Location.working_time_end)
            self.date_changed = False
        defaults = FormDefaults(room_id=self._room.id,
                                start_dt=start_dt,
                                end_dt=end_dt,
                                booked_for_id=session.user.id,
                                booked_for_name=session.user.getStraightFullName().decode('utf-8'),
                                contact_email=session.user.getEmail().decode('utf-8'),
                                contact_phone=session.user.getPhone().decode('utf-8'))

        return self._make_confirm_form(self._room, defaults=defaults, form_class=NewBookingSimpleForm)
Example #20
    def _process_select_room(self):
        # Step 1: Room(s), dates, repetition selection
        form = self._make_select_room_form()
        if form.validate_on_submit():
            flexible_days = form.flexible_dates_range.data
            day_start_dt = datetime.combine(form.start_dt.data.date(), time())
            day_end_dt = datetime.combine(form.end_dt.data.date(), time(23, 59))

            selected_rooms = [r for r in self._rooms if r.id in form.room_ids.data]
            occurrences, candidates = self._get_all_occurrences(form.room_ids.data, form, flexible_days)

            period_form_defaults = FormDefaults(repeat_interval=form.repeat_interval.data,
                                                repeat_frequency=form.repeat_frequency.data)
            period_form = self._make_select_period_form(period_form_defaults)

            # Show step 2 page
            return self._get_view('select_period', rooms=selected_rooms, occurrences=occurrences, candidates=candidates,
                                  start_dt=day_start_dt, end_dt=day_end_dt, period_form=period_form, form=form,
                                  repeat_frequency=form.repeat_frequency.data,
                                  repeat_interval=form.repeat_interval.data, flexible_days=flexible_days).display()

        # GET or form errors => show step 1 page
        return self._get_view('select_room', errors=form.error_list, rooms=self._rooms, form=form,
                              max_room_capacity=Room.max_capacity, can_override=session.user.isRBAdmin(),
                              date_changed=not form.is_submitted() and self.date_changed).display()
Example #21
def add_membership(user_id):
    user = get_user_or_404(user_id)
    form = UserAddGroupMembership()

    if form.validate_on_submit():
        if form.begins_at.data is not None:
            begins_at = datetime.combine(form.begins_at.data, utc.time_min())
        else:
            begins_at = session.utcnow()
        if not form.ends_at.unlimited.data:
            ends_at = datetime.combine(form.ends_at.date.data, utc.time_min())
        else:
            ends_at = None
        make_member_of(user, form.group.data, current_user,
                       closed(begins_at, ends_at))
        message = u"Nutzer zur Gruppe '{}' hinzugefügt.".format(form.group.data.name)
        lib.logging.log_user_event(message, current_user, user)
        session.session.commit()
        flash(u'Nutzer wurde der Gruppe hinzugefügt.', 'success')

        return redirect(url_for(".user_show",
                                user_id=user_id,
                                _anchor='groups'))

    return render_template('user/add_membership.html',
        page_title=u"Neue Gruppenmitgliedschaft für Nutzer {}".format(user_id),
        user_id=user_id, form=form)
Example #22
    def _get_all_occurrences(self, room_ids, form, flexible_days=0, reservation_id=None):
        start_dt = form.start_dt.data
        end_dt = form.end_dt.data
        repeat_frequency = form.repeat_frequency.data
        repeat_interval = form.repeat_interval.data
        day_start_dt = datetime.combine(start_dt.date(), time())
        day_end_dt = datetime.combine(end_dt.date(), time(23, 59))
        today_start_dt = datetime.combine(date.today(), time())
        flexible_start_dt = day_start_dt - timedelta(days=flexible_days)
        if not session.user.isAdmin():
            flexible_start_dt = max(today_start_dt, flexible_start_dt)
        flexible_end_dt = day_end_dt + timedelta(days=flexible_days)

        occurrences = ReservationOccurrence.find_all(
            Reservation.room_id.in_(room_ids),
            Reservation.id != reservation_id,
            ReservationOccurrence.start_dt >= flexible_start_dt,
            ReservationOccurrence.end_dt <= flexible_end_dt,
            ReservationOccurrence.is_valid,
            _join=Reservation,
            _eager=ReservationOccurrence.reservation
        )

        candidates = {}
        for days in xrange(-flexible_days, flexible_days + 1):
            offset = timedelta(days=days)
            series_start = start_dt + offset
            series_end = end_dt + offset
            if series_start < flexible_start_dt:
                continue
            candidates[series_start, series_end] = ReservationOccurrence.create_series(series_start, series_end,
                                                                                       (repeat_frequency,
                                                                                        repeat_interval))
        return occurrences, candidates
Example #23
    def _aux_make_line(node, start_time, figure, lines, color=None, hierarchy=True):
        # TODO: check why this happens!!
        if node.id == 0 or node.Parent is None:
            return
        if node.activeTime[0] == -1:
            node.activeTime = (node.Parent.activeTime[0], node.activeTime[1])
        if node.activeTime[1] == -1:
            node.activeTime = (node.activeTime[0], node.Parent.activeTime[1])
        if node.activeTime[0] == -1 or node.activeTime[1] == -1:
            print("neither the node nor its parent had an active time!!! {0} : {1} {2}: {3}".format(
                node.id, node.action, node.activeTime[0], node.activeTime[1]))
            return

        ##################################
        delta1 = datetime.combine(date.today(), node.activeTime[0]) - datetime.combine(date.today(), start_time)
        delta2 = datetime.combine(date.today(), node.activeTime[1]) - datetime.combine(date.today(), start_time)


        if not color:
            r = lambda: random.randint(0,255)
            line_color = '#%02X%02X%02X' % (r(),r(),r())
        else:
            line_color = color

        if len(node.children) > 0:
            line = figure.hlines(action_ids[node.action["name"]], delta1.total_seconds(),
                                 delta2.total_seconds(), colors="black", lw=15)
        line = figure.hlines(action_ids[node.action["name"]], delta1.total_seconds(),
                             delta2.total_seconds(), colors=line_color, lw=10)

        if len(node.children) > 0 and hierarchy:

            mp = MyPlot(line, node,start_time)
            MyPlot.change_page(figure,mp)
            lines.append(mp)
Example #24
    def adjustTimes(self, start=None, end=None):
        """
        Modifica la TimeTable perchè inizi e termini con i tempi specificati
        (se specificati).  Se necessario vengono aggiunti degli eventi multi
        traccia.
        """
        tpl = {
            'id': None,
            'name': '',
            'custom': '',
            'tracks': self.events.keys(),
            'tags': set(),
            'talk': None,
            'time': None,
            'duration': None,
        }
        e0, e1 = self.limits()
        if start and e0 and start < e0.time():
            for track, events in self.events.items():
                e = dict(tpl)
                e['time'] = datetime.combine(events[0]['time'].date(), start)
                e['duration'] = (e0 - e['time']).seconds / 60
                self.events[track].insert(0, e)

        if end and e1 and end > e1.time():
            d = (datetime.combine(date.today(), end) - e1).seconds / 60
            for track, events in self.events.items():
                e = dict(tpl)
                e['time'] = e1
                e['duration'] = d
                self.events[track].append(e)

        return self
Example #25
 def test_process(self):
     event = Event(
         name="some event", location="Ballwil",
         start_time=datetime.combine(date.today(), time(hour=9)))
     self.plugin.process(event)
     self.assertEqual(event.departure_time, datetime.combine(
         date.today(), time(hour=7, minute=38)))
Example #26
    def save(self, *args, **kwargs):
        # Overwrite save() to calculate and save the duration of a shift and generate a title.
        #
        # As of now, calculating self.hours is a bit hacky because of the way Python's
        # datetime arithmetic works, but it works nonetheless. We temporarily anchor both
        # times to date.today() so that they can be subtracted as datetimes.
        # http://stackoverflow.com/questions/656297/python-time-timedelta-equivalent
        adjusted_start_time = datetime.combine(date.today(), self.start_time)
        adjusted_end_time = datetime.combine(date.today(), self.end_time)
        if self.spans_past_midnight:
            adjusted_end_time += timedelta(days=1) # If the shift spans over midnight, the end date must be a day later.
            if self.start_time < self.end_time: # If the shift starts and ends after midnight:
                adjusted_start_time += timedelta(days=1) # the start date must be a day later as well.
        # Now calculate the duration of the shift through simple subtraction.
        self.hours = adjusted_end_time - adjusted_start_time

        # Set the owner's name as the shift's title, _Unclaimed_ if the shift has no owner.
        """
        if self.owner is None:
            self.title = self.location[0] + "-" + "Unclaimed"
        else:
            self.title = self.location[0] + "-" + str(self.owner)
        """

        super(Shift, self).save(*args, **kwargs)
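The date.today() anchoring exists because two time objects cannot be subtracted directly; a compact standalone sketch of the same idea, including the past-midnight case:

from datetime import date, datetime, time, timedelta

def shift_duration(start, end, spans_past_midnight=False):
    anchor = date.today()                  # any date works as an anchor
    s = datetime.combine(anchor, start)
    e = datetime.combine(anchor, end)
    if spans_past_midnight:
        e += timedelta(days=1)             # shift ends on the next day
        if start < end:                    # shift also starts after midnight
            s += timedelta(days=1)
    return e - s

print(shift_duration(time(22, 30), time(6, 0), spans_past_midnight=True))  # 7:30:00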
Example #27
    def validate_python(self, field_dict, state):
        resource = Resource.get(field_dict['resource_id'])
        if not resource.time_based:
            return
        if 'start' in field_dict and field_dict['start']:
            start = field_dict['start']
            start_time = time(int(start['hour']), int(start['minute']))
            date = dateconverter.to_python(field_dict['date'])
            start = datetime.combine(date, start_time)
        elif 'start_datetime' in field_dict and field_dict['start_datetime']:
            start = datetimeconverter.to_python(field_dict['start_datetime'])
        else:
            start = now(resource.place)
            
        if 'end' in field_dict and field_dict['end']:
            end = field_dict['end']
            end_time = time(int(end['hour']), int(end['minute']))
            date = dateconverter.to_python(field_dict['date'])
            end = datetime.combine(date, end_time)
        elif 'end_datetime' in field_dict and field_dict['end_datetime']:
            end = datetimeconverter.to_python(field_dict['end_datetime'])
        else:
            end = start
            
        errors = {}

        if end <= start:
            errors['end'] = _("The booking must end later than it starts!")
            errors['start'] = _("The booking must start before ending!")
            raise Invalid(
                'That booking ends before starting', field_dict, state, error_dict=errors)
Example #28
    def update(self, this_date=None):
        # resolve the default at call time; a date.today() default would be
        # evaluated only once, when the method is defined
        if this_date is None:
            this_date = date.today()

        start = datetime.combine(this_date, time.min)
        end = datetime.combine(this_date, time.max)

        date_text = urwid.Text(
            this_date.strftime(self.conf.locale.longdateformat))
        event_column = list()
        all_day_events = self.collection.get_allday_by_time_range(this_date)
        events = self.collection.get_datetime_by_time_range(start, end)

        for event in all_day_events:
            event_column.append(
                urwid.AttrMap(U_Event(event,
                                      conf=self.conf,
                                      this_date=this_date,
                                      eventcolumn=self.eventcolumn),
                              event.color, 'reveal focus'))
        events.sort(key=lambda e: e.start)
        for event in events:
            event_column.append(
                urwid.AttrMap(U_Event(event,
                                      conf=self.conf,
                                      this_date=this_date,
                                      eventcolumn=self.eventcolumn),
                              event.color, 'reveal focus'))
        event_list = [urwid.AttrMap(event, None, 'reveal focus')
                      for event in event_column]
        event_count = len(event_list)
        if not event_list:
            event_list = [urwid.Text('no scheduled events')]
        pile = urwid.ListBox(CSimpleFocusListWalker(event_list))
        pile = urwid.Frame(pile, header=date_text)
        self._w = pile
        return event_count
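The this_date=None change above matters because Python evaluates default argument values once, when the function is defined; in a long-running UI process a date.today() default would go stale at midnight. A minimal demonstration:

from datetime import date

def bad(this_date=date.today()):     # frozen at definition time
    return this_date

def good(this_date=None):
    if this_date is None:
        this_date = date.today()     # evaluated on every call
    return this_date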
Example #29
    def filter_with_date_range(self, queryset):
        """
        Filter results based that are within a (possibly open ended) daterange
        """
        # Nothing to do if we don't have a date field
        if not self.date_range_field_name:
            return queryset

        # After the start date
        if self.start_date:
            start_datetime = timezone.make_aware(
                datetime.combine(self.start_date, time(0, 0)),
                timezone.get_default_timezone())

            filter_kwargs = {
                "%s__gt" % self.date_range_field_name: start_datetime,
            }
            queryset = queryset.filter(**filter_kwargs)

        # Before the end of the end date
        if self.end_date:
            end_of_end_date = datetime.combine(
                self.end_date,
                time(hour=23, minute=59, second=59)
            )
            end_datetime = timezone.make_aware(end_of_end_date,
                                               timezone.get_default_timezone())
            filter_kwargs = {
                "%s__lt" % self.date_range_field_name: end_datetime,
            }
            queryset = queryset.filter(**filter_kwargs)

        return queryset
Example #30
def coins_activity(request):
    date = datetime.strptime(request.GET.get('date'), '%Y-%m-%d').date()
    start_period = datetime.strptime(request.GET.get('start_period'), '%H:%M').time()
    end_period = datetime.strptime(request.GET.get('end_period'), '%H:%M').time()
    amount = int(request.GET.get('amount'))
    avtomats = Avtomat.objects.all().select_related('street', 'street__city')
    statistics = Statistic.objects.filter(time__range=(datetime.combine(date, start_period),
                                                       datetime.combine(date, end_period)))
    coins_without_activity = []
    for avtomat in avtomats:
        statistic_from_avtomat = statistics.filter(avtomat_id=avtomat.id).only('kop', 'event')
        if statistic_from_avtomat:
            first = statistic_from_avtomat.first().kop
            amount_accepted = 0
            for i in statistic_from_avtomat:
                if i.event == 3:
                    continue
                if first < i.kop:
                    first = i.kop
                amount_accepted += first - i.kop
                first = i.kop
                if amount_accepted > amount:
                    break
            if amount_accepted <= amount:
                coins_without_activity.append((avtomat, amount_accepted))
    return render(request, 'reports/coins_activity.html', {'coins_without_activity': coins_without_activity,
                                                           'start_period': datetime.combine(date, start_period),
                                                           'end_period': datetime.combine(date, end_period),
                                                           'count': len(coins_without_activity),
                                                           'date': date.strftime('%d%m%Y')})
Example #31
def scrape_nz():    
    utc_tz = pytz.timezone("utc")
    nz_tz = pytz.timezone("Pacific/Auckland")

    today9am = int(
            datetime.combine(
                date=datetime.now(),
                time=time(hour=9)
            ).timestamp()*1e3)
    tomorrow859am = int(
            datetime.combine(
                date=datetime.now()+timedelta(days=1),
                time=time(hour=8,minute=59)
            ).timestamp()*1e3)

    ## List of channels to scrape, case sensitive
    channels = [
            'SKY Sport Select'
            ,'SKY Sport 1'
            ,'SKY Sport 2'
            ,'SKY Sport 3'
            ,'SKY Sport 4'
            ,'SKY Sport 5'
            ,'SKY Sport 6'
            ,'SKY Sport 7'
            ,'SKY Sport 8'
            ,'SKY Sport 9'
            ]
    ## Get channel IDs
    channelURL = "https://static.sky.co.nz/sky/json/channels.prod.json"
    channelJS = requests.get(channelURL).json()
    channelIDdict = {
                        int(x['number']):x['name']
                        for x in channelJS
                        if x['name'] in channels
                    }
    channelIDs = list(channelIDdict.keys())
    ## Get programming
    url = f"https://web-epg.sky.co.nz/prod/epgs/v1?start={today9am}&end={tomorrow859am}&limit=20000"
    req = requests.get(url)
    relevantProgs = [
            x
            for x in req.json()['events']
            if x['channelNumber'] in channelIDs
            ]
    progs = []
    for rp in relevantProgs:
        tba = {}
        ## Start & End
        startUTC = datetime.utcfromtimestamp(int(rp['start'])/1000)
        endUTC = datetime.utcfromtimestamp(int(rp['end'])/1000)
        tba['StartLocal'] = utc_tz.localize(
                                    startUTC
                                        ).astimezone(
                                                nz_tz
                                                    ).replace(tzinfo=None)
        tba['StartUTC'] = startUTC
        tba['EndLocal'] = utc_tz.localize(
                                    endUTC
                                        ).astimezone(
                                                nz_tz
                                                    ).replace(tzinfo=None)
        tba['EndUTC'] = endUTC
        ## ProgrammeName
        tba['ProgrammeName'] = rp['title']
        ## Description
        tba['Description'] = rp['synopsis']
        ## Channel
        tba['Channel'] = channelIDdict[rp['channelNumber']]

        progs.append(tba)
        
    DF = pd.DataFrame(progs).sort_values('StartUTC').reset_index(drop=True)

    columnDict = {
        "StartLocal" : 'DateTime',
        "EndLocal" : 'DateTime',
        "StartUTC" : 'DateTime',
        "EndUTC" : 'DateTime',
        "Channel" : 'str',
        "ProgrammeName" : 'str',
        'Description' : 'str'
        }
    server = "nonDashboard"
    database = "WebScraping"
    sqlTableName = "NewZealandTVGuide"
    primaryKeyColName = "RowID"
        
    insertQ = create_insert_query(DF,columnDict,sqlTableName)

    run_sql_commmand(insertQ,server,database)

    removeDuplicatesQ = create_removeDuplicates_query(columnDict,sqlTableName,primaryKeyColName)

    run_sql_commmand(removeDuplicatesQ,server,database)
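The utcfromtimestamp-then-localize round trip above can be collapsed into one aware conversion; a sketch of the equivalent for a single epoch-milliseconds value (the literal timestamp is illustrative):

from datetime import datetime, timezone
import pytz

nz_tz = pytz.timezone("Pacific/Auckland")
epoch_ms = 1700000000000                  # e.g. rp['start']
aware_utc = datetime.fromtimestamp(epoch_ms / 1000, tz=timezone.utc)
start_local = aware_utc.astimezone(nz_tz).replace(tzinfo=None)
start_utc = aware_utc.replace(tzinfo=None)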
Example #32
 def item_pubdate(self, item):
     # item.date is a datetime.date, this needs a datetime.datetime,
     # so set it to midnight on the given date
     return datetime.combine(item.date, datetime.min.time())
Example #33
# This DAG is configured to print the date and then fail
# (see the expect_failure task), sending an e-mail to your
# specified address on task failure.
# We set retries to 0 to expedite this.

from airflow import DAG
from airflow.operators.bash_operator import BashOperator
from datetime import datetime, timedelta

YESTERDAY = datetime.combine(
    datetime.today() - timedelta(days=1), datetime.min.time())

default_args = {
    'owner': 'airflow',
    'depends_on_past': False,
    'start_date': YESTERDAY,
    'email': ['YOUR E_MAIL HERE'],
    'email_on_failure': True,
    'email_on_retry': False,
    'retries': 0,
}

with DAG('hello_world_email', default_args=default_args) as dag:
  t1 = BashOperator(task_id='print_date', bash_command='date', dag=dag)
  t2 = BashOperator(task_id='expect_failure', bash_command='exit 1', dag=dag)
  t1 >> t2
Example #34
    def test_next_execution(self):
        # A scaffolding function
        def reset_dr_db(dag_id):
            session = Session()
            dr = session.query(models.DagRun).filter_by(dag_id=dag_id)
            dr.delete()
            session.commit()
            session.close()

        EXAMPLE_DAGS_FOLDER = os.path.join(
            os.path.dirname(
                os.path.dirname(os.path.dirname(os.path.realpath(__file__)))),
            "airflow/example_dags")

        dagbag = models.DagBag(dag_folder=EXAMPLE_DAGS_FOLDER,
                               include_examples=False)
        dag_ids = [
            'example_bash_operator',  # schedule_interval is '0 0 * * *'
            'latest_only',  # schedule_interval is timedelta(hours=4)
            'example_python_operator',  # schedule_interval=None
            'example_xcom',  # schedule_interval="@once"
        ]

        # The details below are determined by the schedule_interval of the example DAGs
        now = timezone.utcnow()
        next_execution_time_for_dag1 = pytz.utc.localize(
            datetime.combine(now.date() + timedelta(days=1), time(0)))
        next_execution_time_for_dag2 = now + timedelta(hours=4)
        expected_output = [
            str(next_execution_time_for_dag1),
            str(next_execution_time_for_dag2), "None", "None"
        ]

        for i in range(len(dag_ids)):
            dag_id = dag_ids[i]

            # Clear dag runs so there is no execution history for each DAG
            reset_dr_db(dag_id)

            p = subprocess.Popen([
                "airflow", "next_execution", dag_id, "--subdir",
                EXAMPLE_DAGS_FOLDER
            ],
                                 stdout=subprocess.PIPE)
            p.wait()
            stdout = []
            for line in p.stdout:
                stdout.append(str(line.decode("utf-8").rstrip()))

            # `next_execution` function is inapplicable if no execution record found
            # It prints `None` in such cases
            self.assertEqual(stdout[-1], "None")

            dag = dagbag.dags[dag_id]
            # Create a DagRun for each DAG, to prepare for next step
            dag.create_dagrun(run_id='manual__' + now.isoformat(),
                              execution_date=now,
                              start_date=now,
                              state=State.FAILED)

            p = subprocess.Popen([
                "airflow", "next_execution", dag_id, "--subdir",
                EXAMPLE_DAGS_FOLDER
            ],
                                 stdout=subprocess.PIPE)
            p.wait()
            stdout = []
            for line in p.stdout:
                stdout.append(str(line.decode("utf-8").rstrip()))
            self.assertEqual(stdout[-1], expected_output[i])

            reset_dr_db(dag_id)
Example #35
    def get_activities(self, save_json: bool = False, **kwargs):
        """
        Returns data for multiple activities that meet the parameters provided.
        The main use case is to retrieve all activities after the athlete's latest activity in the database
        therefore the default 'after' value will be the latest start_date. If there are no activities for the athlete
        the after value will be None.
        The results are concatenated onto the main dataframe with activities.

        :param save_json: Option to save API response data as a json file, defaults to False
        :param kwargs:
            after - return activities after this date provided as datetime, date, or str in 'yyyy-mm-dd' format,
            before - return activities before this date provided as datetime, date, or str in 'yyyy-mm-dd' format,
            per_page - number of activities per page (default and max are 200 to minimize API calls),
            page - starting page number
        :return:
        """
        after = kwargs.get('after', self.latest_activity)
        before = kwargs.get('before', None)
        if after is not None:
            if isinstance(after, str):
                after = datetime.timestamp(datetime.strptime(
                    after, '%Y-%m-%d'))
            elif isinstance(after, datetime):
                after = datetime.timestamp(after)
            elif isinstance(after, date):
                after = datetime.timestamp(
                    datetime.combine(after, datetime.min.time()))
        if before is not None:
            if isinstance(before, str):
                before = datetime.timestamp(
                    datetime.strptime(before, '%Y-%m-%d'))
            elif isinstance(before, datetime):
                before = datetime.timestamp(before)
            elif isinstance(before, date):
                before = datetime.timestamp(
                    datetime.combine(before, datetime.min.time()))
        per_page = kwargs.get('per_page', 200)
        page = kwargs.get('page', 1)
        response = requests.get(url=f"{self.base_url}/athlete/activities",
                                headers=self.headers,
                                params={
                                    'after': after,
                                    'before': before,
                                    'per_page': per_page,
                                    'page': page
                                })
        assert response.ok, f"{response.status_code}, {response.text}"
        response_data = json.loads(response.text)
        if save_json:
            data_file = os.path.join(
                'Activities', 'activity_lists',
                f"activity_list {datetime.now().strftime('%Y-%m-%d %H%M%S')}.json"
            )
            with open(data_file, 'w') as f:
                json.dump(response_data, f)
        if len(response_data) > 0:
            self.df = pd.concat([self.df,
                                 pd.json_normalize(response_data)
                                 ]).reset_index(drop=True)
            time.sleep(2)
            return self.get_activities(page=(page + 1))
Example #36
    def _clean_interval(self, atype):
        cdata = self.cleaned_data
        start = cdata['start']
        end = cdata['end']

        if not start and not end:
            return constants.FLOATING

        floating_type = constants.NARROW

        get = cdata.get
        is_all_day = get('is_all_day', False)
        start_time = get('start_time')
        end_time = get('end_time')

        # TODO: not start, not end, start time, end time => floating activity with time set but lost in the process

        if start_time is None and end_time is None:
            if not is_all_day:
                if get('busy', False):
                    raise ValidationError(
                        self.error_messages['floating_cannot_busy'],
                        code='floating_cannot_busy',
                    )

                floating_type = constants.FLOATING_TIME

        if not start and end:
            raise ValidationError(self.error_messages['no_start'],
                                  code='no_start')

        if start and start_time:
            start = make_aware_dt(datetime.combine(start, start_time))

        if end and end_time:
            end = make_aware_dt(datetime.combine(end, end_time))

        if start and not end:
            if end_time is not None:
                end = make_aware_dt(datetime.combine(start, end_time))
            else:
                tdelta = atype.as_timedelta()

                if ((is_all_day or floating_type == constants.FLOATING_TIME)
                        and tdelta.days):
                    # In 'all day' mode, we round the number of day
                    days = tdelta.days - 1  # Activity already takes 1 day (we do not want it takes 2)

                    if tdelta.seconds:
                        days += 1

                    tdelta = timedelta(days=days)

                end = start + tdelta

        if is_all_day or floating_type == constants.FLOATING_TIME:
            start = make_aware_dt(
                datetime.combine(start, time(hour=0, minute=0)))
            end = make_aware_dt(datetime.combine(end, time(hour=23,
                                                           minute=59)))

        if start > end:
            raise ValidationError(
                self.error_messages['end_before_start_time'],
                code='end_before_start_time',
            )

        cdata['start'] = start
        cdata['end'] = end

        return floating_type
Example #37
def main(args):
    ## initialize configurable variables
    yesterday = date.today() - timedelta(days=1)
    start_time = datetime.combine(yesterday, time(0, 0, 0))
    end_time = datetime.combine(date.today(), time(0, 0, 0))
    h5_path = "/global/projectb/shared/data/genepool/procmon"
    h5_prefix = "procmon_genepool"
    load_intermediate = None
    save_intermediate = None

    ## parse command line arguments
    i = 0
    while i < len(sys.argv):
        if sys.argv[i] == "--start":
            i += 1
            if i < len(sys.argv):
                try:
                    start_time = datetime.strptime(sys.argv[i], "%Y%m%d%H%M%S")
                except:
                    usage(1)
            else:
                usage(1)
        if sys.argv[i] == "--end":
            i += 1
            if i < len(sys.argv):
                try:
                    end_time = datetime.strptime(sys.argv[i], "%Y%m%d%H%M%S")
                except ValueError:
                    usage(1)
            else:
                usage(1)
        if sys.argv[i] == "--h5-path":
            i += 1
            if i < len(sys.argv):
                h5_path = sys.argv[i]
                if not os.path.exists(h5_path):
                    print "%s doesn't exist!" % (h5_path)
                    usage(1)
            else:
                usage(1)
        if sys.argv[i] == "--save":
            i += 1
            if i < len(sys.argv):
                save_intermediate = sys.argv[i]
            else:
                usage(1)
        if sys.argv[i] == "--load":
            i += 1
            if i < len(sys.argv):
                load_intermediate = sys.argv[i]
                if not os.path.exists(load_intermediate):
                    print "%s doesn't exist!" % (load_intermediate)
                    usage(1)
            else:
                usage(1)
        if sys.argv[i] == "--help":
            usage(0)
        i += 1

    ## get list of files
    filename_hash = {}
    all_files = os.listdir(h5_path)
    pattern = "%s\.([0-9]+)\.h5" % (h5_prefix)
    regex = re.compile(pattern)
    for filename in all_files:
        f_match = regex.match(filename)
        if f_match is not None:
            currts = datetime.strptime(f_match.group(1), "%Y%m%d%H%M%S")
            if currts >= start_time and currts < end_time:
                filename_hash[currts] = os.path.join(h5_path, filename)

    filenames = []
    keys = sorted(filename_hash.keys())
    for key in keys:
        filenames.append(filename_hash[key])

    q_status = 0
    h_status = 0
    user_hash = None
    summaries = None
    if load_intermediate is not None:
        print "Loading from intermediate data file: %s" % load_intermediate
        (processes, qqacct_data,
         user_hash) = cPickle.load(open(load_intermediate, 'rb'))

    else:
        print "getting qqacct data between %s and %s" % (str(start_time),
                                                         str(end_time))
        (q_status, qqacct_data) = get_job_data(start_time, end_time)
        print "getting process data (may take a long time):"
        (h_status, processes) = get_processes(filenames)
        identify_scripts(processes)
        identify_users(processes)
        processes = integrate_job_data(processes, qqacct_data)
        summaries = summarize_data(processes)

    if save_intermediate is not None:
        print "Saving to intermediate data file: %s" % save_intermediate
        h5file = "%s.summary.h5" % save_intermediate
        for key in summaries.keys():
            summaries[key].to_hdf(h5file, key)
        cPickle.dump((processes, qqacct_data, user_hash),
                     open(save_intermediate, 'wb'))
        processes.to_hdf(save_intermediate, "processes")
        #qqacct_data.to_hdf(save_intermediate, "qqacct")

    if q_status == 0 and h_status == 0:
        pass
Example #38
    def expiration_date(self):
        """Return the subscription expiration as a UTC datetime object."""
        return datetime.combine(
            dt_util.parse_date(self.claims['custom:sub-exp']),
            datetime.min.time()).replace(tzinfo=dt_util.UTC)
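The combine/replace pair above pins a date to midnight and then attaches a zone without shifting the clock; a stdlib-only sketch of the same day-precision conversion (dt_util is assumed to be a project datetime helper):

from datetime import date, datetime, timezone

def expiration_from_iso(day_str):
    # "2024-01-31" -> 2024-01-31 00:00:00+00:00 (midnight UTC, day precision)
    day = date.fromisoformat(day_str)
    return datetime.combine(day, datetime.min.time()).replace(tzinfo=timezone.utc)

print(expiration_from_iso("2024-01-31"))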
Example #39
    def make_person_sequence(self):
        start_time = time.time()
        # Compute each patient's data start and end times.
        timerange_df = self.cohort_df.groupby('SUBJECT_ID').agg({
            'COHORT_START_DATE': 'min',
            'COHORT_END_DATE': 'max',
        })
        timerange_df['START_DATE'] = timerange_df.COHORT_START_DATE.dt.date
        timerange_df['START_HOURGRP'] = (
            timerange_df.COHORT_START_DATE.dt.hour // self.group_hour)
        timerange_df['END_DATE'] = timerange_df.COHORT_END_DATE.dt.date
        timerange_df['END_HOURGRP'] = (
            timerange_df.COHORT_END_DATE.dt.hour // self.group_hour)
        timerange_df = timerange_df.drop(
            ['COHORT_START_DATE', 'COHORT_END_DATE'], axis=1)

        demographic_ary = self.person_df.sort_values('PERSON_ID',
                                                     ascending=True).values
        condition_ary = self.condition_df.sort_values(
            ['PERSON_ID', 'DATE', 'HOURGRP'], ascending=True).values
        measurement_ary = self.measurement_df.sort_values(
            ['PERSON_ID', 'DATE', 'HOURGRP'], ascending=True).values
        timerange_ary = timerange_df.sort_values(
            'SUBJECT_ID', ascending=True).reset_index().values

        demographic_cols = ["AGE_HOUR", "GENDER"]
        condition_cols = self.condition_df.columns[3:]
        measurement_cols = self.measurement_df.columns[3:]

        # Fill in the time-slot keys so there are no empty time ranges
        max_hourgrp = (24 // self.group_hour) - 1

        key_list = []
        for person_id, start_date, start_hourgrp, end_date, end_hourgrp in timerange_ary:
            cur_date = start_date
            cur_hourgrp = start_hourgrp

            while True:
                key_list.append((person_id, cur_date, cur_hourgrp))

                cur_hourgrp += 1  # advance by one hour group
                if cur_hourgrp > max_hourgrp:  # roll over to the next day
                    cur_date = cur_date + timedelta(days=1)
                    cur_hourgrp = 0

                if cur_date > end_date or \
                   (cur_date == end_date and cur_hourgrp >= end_hourgrp):
                    # reached the end of the range
                    break

        # Fill in the data according to the time-slot keys
        demographic_idx = condition_idx = measurement_idx = 0
        prev_person_id = None
        prev_conditions = None

        data_cols = list(demographic_cols) + list(measurement_cols) + list(
            condition_cols)
        data_list = np.zeros((len(key_list), len(data_cols)), dtype=np.float32)
        for idx, row in enumerate(key_list):
            person_id, date, hourgrp = row

            col_start_idx = col_end_idx = 0
            col_end_idx += len(demographic_cols)
            # Add demographic data
            while True:
                if demographic_idx >= len(demographic_ary):
                    break

                demographic_row = demographic_ary[demographic_idx]
                demographic_person_id = demographic_row[0]
                # Align timezones so the subtraction below is consistent.
                demographic_age = datetime.combine(
                    date, datetime_time(hour=hourgrp, tzinfo=timezone.utc)
                ).astimezone(pytz.utc) - demographic_row[1]
                demographic_gender = demographic_row[2]
                demographic_data = [
                    demographic_age.total_seconds() // 3600.,
                    demographic_gender
                ]

                state = 0  # 0: keep scanning, 1: match found, 2: no match
                if demographic_person_id > person_id:  # moved past this patient
                    state = 2
                elif demographic_person_id == person_id:  # matching row
                    state = 1

                if state == 0:  # keep scanning
                    demographic_idx += 1
                elif state == 1:  # match found
                    data_list[idx, col_start_idx:col_end_idx] = demographic_data
                    break
                elif state == 2:  # no matching row
                    break

            # Scan measurement data
            col_start_idx = col_end_idx
            col_end_idx += len(measurement_cols)
            while True:
                if measurement_idx >= len(measurement_ary):
                    break

                measurement_row = measurement_ary[measurement_idx]
                measurement_person_id = measurement_row[0]
                measurement_date = measurement_row[1]
                measurement_hourgrp = measurement_row[2]
                measurement_data = measurement_row[3:]

                state = 0  # 0: keep scanning, 1: match found, 2: no match
                if measurement_person_id > person_id:  # moved past this patient
                    state = 2
                elif measurement_person_id == person_id:
                    if measurement_date > date:  # moved past this date
                        state = 2
                    elif measurement_date == date:
                        if measurement_hourgrp > hourgrp:  # moved past this hour group
                            state = 2
                        elif measurement_hourgrp == hourgrp:  # matching row
                            state = 1

                if state == 0:  # keep scanning
                    measurement_idx += 1
                elif state == 1:  # match found
                    data_list[idx, col_start_idx:col_end_idx] = measurement_data
                    measurement_idx += 1
                    break
                elif state == 2:  # no matching row
                    break

            # Scan condition data
            col_start_idx = col_end_idx
            col_end_idx += len(condition_cols)
            # Different patient from the previous row: reset the condition info
            if prev_person_id != person_id:
                prev_conditions = np.array([0] * len(condition_cols))

            while True:
                if condition_idx >= len(condition_ary):
                    break

                condition_row = condition_ary[condition_idx]
                condition_person_id = condition_row[0]
                condition_date = condition_row[1]
                condition_hourgrp = condition_row[2]
                condition_data = condition_row[3:]

                state = 0  # 0: keep scanning, 1: match found, 2: no match
                if condition_person_id > person_id:  # moved past this patient
                    state = 2
                elif condition_person_id == person_id:
                    if condition_date > date:  # moved past this date
                        state = 2
                    elif condition_date == date:
                        if condition_hourgrp > hourgrp:  # moved past this hour group
                            state = 2
                        elif condition_hourgrp == hourgrp:  # matching row
                            state = 1

                if state == 0:  # keep scanning
                    condition_idx += 1
                elif state == 1:  # match found
                    # Accumulate: merge the earlier condition info with this row's
                    prev_conditions = np.array(prev_conditions) + np.array(
                        condition_data)
                    data_list[idx, col_start_idx:col_end_idx] = prev_conditions
                    condition_idx += 1
                    break
                elif state == 2:  # no matching row
                    break

            prev_person_id = person_id

        self.feature_ary = data_list
        self.feature_key_df = pd.DataFrame(
            key_list, columns=['PERSON_ID', 'DATE', 'HOURGRP'])
        print("data_loader make_person_sequence time:",
              time.time() - start_time)
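A minimal sketch of the key-expansion loop above, assuming group_hour = 8 (three buckets per day: hours 0-7 -> group 0, 8-15 -> 1, 16-23 -> 2):

from datetime import date, timedelta

group_hour = 8
max_hourgrp = (24 // group_hour) - 1

def expand_keys(person_id, start_date, start_hourgrp, end_date, end_hourgrp):
    keys = []
    cur_date, cur_hourgrp = start_date, start_hourgrp
    while True:
        keys.append((person_id, cur_date, cur_hourgrp))
        cur_hourgrp += 1  # advance by one hour group
        if cur_hourgrp > max_hourgrp:  # roll over to the next day
            cur_date += timedelta(days=1)
            cur_hourgrp = 0
        if cur_date > end_date or (cur_date == end_date and cur_hourgrp >= end_hourgrp):
            break
    return keys

print(expand_keys(1, date(2024, 1, 1), 2, date(2024, 1, 2), 1))
# [(1, date(2024, 1, 1), 2), (1, date(2024, 1, 2), 0)]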
Example #40
    def date(self, d):
        return datetime.combine(iso_string_to_date(d), time())
Example #41
    def __str__(self):
        return "%s, %s: %s - %s" % (
            self.room.name, self.get_day_of_week_display(), self.start_time,
            (datetime.combine(datetime.today(), self.start_time) +
             self.duration).time())
Example #42
    def _parse_start_end(self, response):
        start, end = super()._parse_start_end(response)
        if end and (end - start).days >= 1:
            end = datetime.combine(start.date(), end.time())
        return start, end
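The clamp above pulls an end time that spilled onto a later day back onto the start's date; in isolation:

from datetime import datetime

start = datetime(2024, 5, 3, 18, 0)
end = datetime(2024, 5, 5, 20, 0)  # scraped end landed on a later day
if end and (end - start).days >= 1:
    end = datetime.combine(start.date(), end.time())
print(end)  # 2024-05-03 20:00:00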
Example #43
    def taa_rough_timestamp(self) -> int:
        """Get a timestamp accurate to the day.

        Anything more accurate is a privacy concern.
        """
        return int(datetime.combine(date.today(), datetime.min.time()).timestamp())
Example #44
def update_attendance_time(employee, attendance_date, in_time, out_time):
    twh = ''
    if attendance_date:
        attendance_date = out_date = datetime.strptime(attendance_date,
                                                       '%Y-%m-%d')
        if in_time and out_time:
            in_time_f = datetime.strptime(in_time, '%H:%M:%S').time()
            out_time_f = datetime.strptime(out_time, '%H:%M:%S').time()
            if out_time_f < in_time_f:
                out_date = (add_days(attendance_date, 1)).date()
            in_time = datetime.strptime(in_time, '%H:%M:%S').time()
            in_time = datetime.combine(attendance_date, in_time)
            out_time = datetime.strptime(out_time, '%H:%M:%S').time()
            out_time = datetime.combine(out_date, out_time)

            twh = out_time - in_time
            status = 'Absent'
            if twh > timedelta(hours=4):
                status = 'Half Day'
            if twh >= timedelta(hours=8):
                status = 'Present'
            if twh:
                twh_seconds = twh.total_seconds()
                minutes = twh_seconds // 60
                hours = minutes // 60
                twh = "%02d hr %02d min" % (hours, minutes % 60)
            if frappe.db.exists("Attendance", {
                    "employee": employee,
                    "attendance_date": attendance_date
            }):
                att = frappe.db.get_value("Attendance", {
                    "employee": employee,
                    "attendance_date": attendance_date
                }, "name")
                # frappe.errprint(att)
                exist_att = frappe.get_doc("Attendance", att)
                if not exist_att.in_time or not exist_att.out_time:
                    exist_att.update({
                        "biometric_id": employee,
                        "attendance_date": attendance_date,
                        "status": status,
                        "in_time": in_time,
                        "out_time": out_time,
                        "total_working_hours": twh,
                        "modified_status": "Miss Punch"
                    })
                    exist_att.save(ignore_permissions=True)
                    frappe.db.commit()
            else:
                exist_att = frappe.new_doc("Attendance")
                exist_att.update({
                    "employee": employee,
                    "biometric_id": employee,
                    "attendance_date": attendance_date,
                    "in_time": in_time,
                    "status": status,
                    "out_time": out_time,
                    "total_working_hours": twh,
                    "modified_status": "Miss Punch"
                })
                exist_att.save(ignore_permissions=True)
                exist_att.submit()
                frappe.db.commit()
        return "Ok"
Example #45
def view_product(request, pid):
    if request.user.is_superuser:
        prod = get_object_or_404(Product, id=pid)
    else:
        prod = get_object_or_404(Product,
                                 id=pid,
                                 authorized_users__in=[request.user])
    engs = Engagement.objects.filter(product=prod)
    i_engs = Engagement.objects.filter(product=prod, active=False)
    scan_sets = ScanSettings.objects.filter(product=prod)
    tools = Tool_Product_Settings.objects.filter(product=prod).order_by('name')
    auth = request.user.is_staff or request.user in prod.authorized_users.all()
    creds = Cred_Mapping.objects.filter(
        product=prod).select_related('cred_id').order_by('cred_id')

    if not auth:
        # will render 403
        raise PermissionDenied

    ct = ContentType.objects.get_for_model(prod)
    product_cf = CustomField.objects.filter(content_type=ct)
    product_metadata = {}

    for cf in product_cf:
        cfv = CustomFieldValue.objects.filter(field=cf, object_id=prod.id)
        if len(cfv):
            product_metadata[cf.name] = cfv[0].value

    try:
        start_date = Finding.objects.filter(
            test__engagement__product=prod).order_by('date')[:1][0].date
    except IndexError:
        start_date = timezone.now()

    end_date = timezone.now()

    tests = Test.objects.filter(engagement__product=prod)

    verified_findings = Finding.objects.filter(
        test__engagement__product=prod,
        date__range=[start_date, end_date],
        false_p=False,
        verified=True,
        duplicate=False,
        out_of_scope=False).order_by("date")

    week_date = end_date - timedelta(days=7)  # seven days and newer are considered "new"

    new_verified_findings = Finding.objects.filter(
        test__engagement__product=prod,
        date__range=[week_date, end_date],
        false_p=False,
        verified=True,
        duplicate=False,
        out_of_scope=False).order_by("date")

    open_findings = Finding.objects.filter(test__engagement__product=prod,
                                           date__range=[start_date, end_date],
                                           false_p=False,
                                           verified=True,
                                           duplicate=False,
                                           out_of_scope=False,
                                           mitigated__isnull=True)

    closed_findings = Finding.objects.filter(
        test__engagement__product=prod,
        date__range=[start_date, end_date],
        false_p=False,
        verified=True,
        duplicate=False,
        out_of_scope=False,
        mitigated__isnull=False)

    start_date = timezone.make_aware(
        datetime.combine(start_date, datetime.min.time()))

    r = relativedelta(end_date, start_date)
    weeks_between = int(
        ceil((((r.years * 12) + r.months) * 4.33) + (r.days / 7)))
    if weeks_between <= 0:
        weeks_between += 2

    punchcard, ticks, highest_count = get_punchcard_data(
        verified_findings, weeks_between, start_date)
    add_breadcrumb(parent=prod, top_level=False, request=request)

    open_close_weekly = OrderedDict()
    new_weekly = OrderedDict()
    severity_weekly = OrderedDict()
    critical_weekly = OrderedDict()
    high_weekly = OrderedDict()
    medium_weekly = OrderedDict()

    for v in verified_findings:
        iso_cal = v.date.isocalendar()
        x = iso_to_gregorian(iso_cal[0], iso_cal[1], 1)
        y = x.strftime("<span class='small'>%m/%d<br/>%Y</span>")
        x = (tcalendar.timegm(x.timetuple()) * 1000)
        if x not in critical_weekly:
            critical_weekly[x] = {'count': 0, 'week': y}
        if x not in high_weekly:
            high_weekly[x] = {'count': 0, 'week': y}
        if x not in medium_weekly:
            medium_weekly[x] = {'count': 0, 'week': y}

        if x in open_close_weekly:
            if v.mitigated:
                open_close_weekly[x]['closed'] += 1
            else:
                open_close_weekly[x]['open'] += 1
        else:

            if v.mitigated:
                open_close_weekly[x] = {'closed': 1, 'open': 0, 'accepted': 0}
            else:
                open_close_weekly[x] = {'closed': 0, 'open': 1, 'accepted': 0}
            open_close_weekly[x]['week'] = y

        if x in severity_weekly:
            if v.severity in severity_weekly[x]:
                severity_weekly[x][v.severity] += 1
            else:
                severity_weekly[x][v.severity] = 1
        else:
            severity_weekly[x] = {
                'Critical': 0,
                'High': 0,
                'Medium': 0,
                'Low': 0,
                'Info': 0
            }
            severity_weekly[x][v.severity] = 1
            severity_weekly[x]['week'] = y

        if v.severity == 'Critical':
            if x in critical_weekly:
                critical_weekly[x]['count'] += 1
            else:
                critical_weekly[x] = {'count': 1, 'week': y}
        elif v.severity == 'High':
            if x in high_weekly:
                high_weekly[x]['count'] += 1
            else:
                high_weekly[x] = {'count': 1, 'week': y}
        elif v.severity == 'Medium':
            if x in medium_weekly:
                medium_weekly[x]['count'] += 1
            else:
                medium_weekly[x] = {'count': 1, 'week': y}

    test_data = {}
    for t in tests:
        if t.test_type.name in test_data:
            test_data[t.test_type.name] += t.verified_finding_count()
        else:
            test_data[t.test_type.name] = t.verified_finding_count()

    return render(
        request, 'dojo/view_product.html', {
            'prod': prod,
            'product_metadata': product_metadata,
            'engs': engs,
            'i_engs': i_engs,
            'scan_sets': scan_sets,
            'tools': tools,
            'creds': creds,
            'verified_findings': verified_findings,
            'open_findings': open_findings,
            'closed_findings': closed_findings,
            'new_findings': new_verified_findings,
            'start_date': start_date,
            'punchcard': punchcard,
            'ticks': ticks,
            'highest_count': highest_count,
            'open_close_weekly': open_close_weekly,
            'severity_weekly': severity_weekly,
            'critical_weekly': critical_weekly,
            'high_weekly': high_weekly,
            'medium_weekly': medium_weekly,
            'test_data': test_data,
            'user': request.user,
            'authorized': auth
        })
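The weekly buckets above key each finding by the Monday of its ISO week, expressed as epoch milliseconds (iso_to_gregorian is assumed to be a project helper); with the standard library alone (Python 3.8+):

import calendar as tcalendar
from datetime import date

d = date(2024, 5, 3)
iso_year, iso_week, _ = d.isocalendar()
week_start = date.fromisocalendar(iso_year, iso_week, 1)  # Monday of that week
x = tcalendar.timegm(week_start.timetuple()) * 1000  # ms key, as in the view
print(week_start, x)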
Example #46
def filter2query(filter_, user_id=None):
    """Convert filter syntax to ElasticSearch query

    :param dict filter_: filter syntax, as used in saved_searches endpoint
    :return dict: ElasticSearch query DSL usable with service.search method
    """
    # we'll delete key while we handle them, to check that all has been managed at the end
    search_query = filter_["query"].copy()
    query_must = []
    query_must_not = []
    post_filter = []
    post_filter_must_not = []

    # controlled vocabularies can be overridden in settings
    search_cvs = app.config.get("search_cvs", SEARCH_CVS)

    for cv in search_cvs:
        if cv["id"] in search_query and cv["field"] != cv["id"]:
            terms = json.loads(search_query.pop(cv["id"]))
            query_must.append({"terms": {cv["field"] + ".qcode": terms}})

    try:
        query_string = search_query.pop("q")
    except KeyError:
        pass
    else:
        for cv in search_cvs:
            if cv["field"] != cv["id"]:
                # str.replace returns a new string, so assign the result back
                query_string = query_string.replace(cv["id"] + ".qcode:(",
                                                    cv["field"] + ".qcode:(")
        query_must.append({
            "query_string": {
                "query": query_string,
                "default_operator": "AND"
            }
        })

    to_delete = []
    for key, value in search_query.items():
        if key == "from_desk":
            desk = value.split("-")
            if len(desk) == 2:
                if desk[1] == "authoring":
                    query_must.append(
                        {"term": {
                            "task.last_authoring_desk": desk[0]
                        }})
                else:
                    query_must.append(
                        {"term": {
                            "task.last_production_desk": desk[0]
                        }})
            else:
                logger.warning('unexpected "from_desk" value: {value}'.format(
                    value=value))
        elif key == "to_desk":
            desk = value.split("-")
            if len(desk) == 2:
                query_must.append({"term": {"task.desk": desk[0]}})
                if "from_desk" not in filter_["query"]:
                    if desk[1] == "authoring":
                        field = "task.last_production_desk"
                    else:
                        field = "task.last_authoring_desk"
                    query_must.append({"exists": {field: field}})
            else:
                logger.warning('unexpected "to_desk" value: {value}'.format(
                    value=value))
        elif key == "spike":
            if value == "include":
                pass
            elif value == "only":
                query_must.append({"term": {"state": "spiked"}})
            else:
                query_must_not.append({"term": {"state": "spiked"}})
        elif key == "featuremedia" and value:
            query_must.append(
                {"exists": {
                    "field": "associations.featuremedia"
                }})
        elif key == "subject":
            terms = json.loads(value)
            query_must.append({
                "bool": {
                    "should": [{
                        "terms": {
                            "subject.qcode": terms
                        }
                    }, {
                        "terms": {
                            "subject.parent": terms
                        }
                    }],
                    "minimum_should_match":
                    1,
                }
            })
        elif key == "company_codes":
            terms = json.loads(value)
            query_must.append({"terms": {"company_codes.qcode": terms}})
        elif key == "marked_desks":
            terms = json.loads(value)
            query_must.append({"terms": {"marked_desks.desk_id": terms}})
        elif key == "ignoreKilled":
            query_must_not.append({"terms": {"state": ["killed", "recalled"]}})
        elif key == "onlyLastPublished":
            query_must_not.append(
                {"term": {
                    "last_published_version": "false"
                }})
        elif key == "ignoreScheduled":
            query_must_not.append({"term": {"state": "scheduled"}})
        elif key == "raw":
            query_must.append({
                "query_string": {
                    "query": value,
                    "lenient": False,
                    "default_operator": "AND",
                },
            })
        else:
            continue
        to_delete.append(key)

    for key in to_delete:
        del search_query[key]

    for key, field in POST_FILTER_MAP.items():
        value = search_query.pop(key, None)
        if value is not None:
            try:
                post_filter.append({"terms": {field: json.loads(value)}})
            except (ValueError, TypeError) as e:
                logger.warning(
                    'Invalid data received for post filter key="{key}" data="{value}" error="{e}"'
                    .format(key=key, e=e, value=value))
                # the value is probably not JSON encoded as expected, we try directly the value
                post_filter.append({"terms": {field: value}})
        else:
            value = search_query.pop("not" + key, None)
            if value is not None:
                post_filter_must_not.append(
                    {"terms": {
                        field: json.loads(value)
                    }})

    # ingest provider
    ingest_provider = search_query.pop("ingest_provider", None)
    if ingest_provider is not None:
        post_filter.append({"term": {"ingest_provider": ingest_provider}})

    # used by AAP multimedia datalayer
    credit_qcode = search_query.pop("creditqcode", None)
    if credit_qcode is not None:
        values = json.loads(credit_qcode)
        post_filter.append({"terms": {"credit": [v["value"] for v in values]}})

    # date filters
    tz = pytz.timezone(app.config["DEFAULT_TIMEZONE"])
    range_ = {}
    to_delete = []
    for field in DATE_FIELDS:
        value = search_query.get(field)
        if value in DATE_FILTERS:
            range_[field] = DATE_FILTERS[value]
            to_delete.append(field)
        else:
            field_suff = field + "to"
            value = search_query.get(field_suff)
            if value:
                to_delete.append(field_suff)
                field_range = range_.setdefault(field, {})
                try:
                    date = datetime.strptime(value, DATE_FORMAT)
                except ValueError:
                    # the value doesn't correspond to DATE_FORMAT,
                    # it may be using ES date math syntax
                    field_range["lte"] = value
                else:
                    # include the whole "to" day in the upper bound
                    date = tz.localize(
                        datetime.combine(date, datetime.max.time()))
                    field_range["lte"] = date.isoformat()

            field_suff = field + "from"
            value = search_query.get(field_suff)
            if value:
                to_delete.append(field_suff)
                field_range = range_.setdefault(field, {})
                try:
                    date = datetime.strptime(value, DATE_FORMAT)
                except ValueError:
                    # same as above
                    field_range["gte"] = value
                else:
                    # start from the beginning of the "from" day
                    date = tz.localize(
                        datetime.combine(date, datetime.min.time()))
                    field_range["gte"] = date.isoformat()

    if range_:
        post_filter.append({"range": range_})
    for key in to_delete:
        del search_query[key]

    # remove other users drafts
    if user_id is not None:
        query_must.append({
            "bool": {
                "should": [
                    {
                        "bool": {
                            "must": [{
                                "term": {
                                    "state": "draft"
                                }
                            }, {
                                "term": {
                                    "original_creator": user_id
                                }
                            }]
                        }
                    },
                    {
                        "bool": {
                            "must_not": {
                                "terms": {
                                    "state": ["draft"]
                                }
                            }
                        }
                    },
                ]
            }
        })

    # this is needed for archived collection
    query_must_not.append({"term": {"package_type": "takes"}})

    query = {
        "query": {
            "bool": {
                "must": query_must,
                "must_not": query_must_not
            }
        }
    }
    if post_filter or post_filter_must_not:
        query["post_filter"] = {
            "bool": {
                "must": post_filter,
                "must_not": post_filter_must_not
            }
        }

    query["sort"] = {"versioncreated": "desc"}

    search_query.pop("repo", None)

    if "params" in search_query and (search_query['params'] is None or
                                     not json.loads(search_query['params'])):
        del search_query['params']

    if search_query:
        logger.warning(
            "Not all query fields have been used; remaining fields: {search_query}"
            .format(search_query=search_query))

    return query
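The date filters above localize a parsed day to the configured timezone before building the range; the core of that, as a sketch (the real DATE_FORMAT is project configuration; "%d/%m/%Y" here is an assumption):

import pytz
from datetime import datetime

tz = pytz.timezone("Europe/Prague")
day = datetime.strptime("03/05/2024", "%d/%m/%Y")
gte = tz.localize(datetime.combine(day, datetime.min.time())).isoformat()
lte = tz.localize(datetime.combine(day, datetime.max.time())).isoformat()
print({"range": {"versioncreated": {"gte": gte, "lte": lte}}})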
Example #47
    def minutes(self):
        current = datetime.combine(self.day.date, self.time)
        start = datetime.combine(self.day.date, datetime.min.time())
        return int((current - start).seconds / 60)
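The same minutes-since-midnight computation in isolation (self.day.date and self.time above are assumed to be plain date and time values):

from datetime import date, datetime, time

d, t = date(2024, 5, 3), time(9, 30)
midnight = datetime.combine(d, datetime.min.time())
print(int((datetime.combine(d, t) - midnight).seconds / 60))  # 570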
Example #48
def rounding_shouldStartAfter(start_after, auction, use_from=datetime(2016, 6, 1, tzinfo=TZ)):
    if (auction.enquiryPeriod and auction.enquiryPeriod.startDate or get_now()) > use_from \
            and not (SANDBOX_MODE and auction.submissionMethodDetails
                     and u'quick' in auction.submissionMethodDetails):
        midnight = datetime.combine(start_after.date(), time(0, tzinfo=start_after.tzinfo))
        if start_after >= midnight:
            start_after = midnight + timedelta(1)
    return start_after
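In effect any start on a given day is pushed to the next local midnight; with a fixed timezone:

from datetime import datetime, time, timedelta, timezone

start_after = datetime(2024, 5, 3, 15, 45, tzinfo=timezone.utc)
midnight = datetime.combine(start_after.date(), time(0, tzinfo=start_after.tzinfo))
if start_after >= midnight:  # true for any time-of-day on that date
    start_after = midnight + timedelta(1)
print(start_after)  # 2024-05-04 00:00:00+00:00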
Example #49
    def get_start_attendance_time(self, obj):
        return (datetime.combine(datetime.now().date(), obj.start) -
                timedelta(minutes=10)).time()
Example #50
def get_date_max_time(date_str):
    return datetime.combine(get_date_by_str(date_str), time.max)
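time.max is 23:59:59.999999, so the result is the last representable instant of the given day (get_date_by_str is assumed to parse the string into a date):

from datetime import date, datetime, time

print(datetime.combine(date(2024, 5, 3), time.max))
# 2024-05-03 23:59:59.999999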
Example #51
def all_data():
    """Create some sample data."""

    # In case this is run more than once, empty out existing data
    print "deleting data"
    db.drop_all()

    db.create_all()

    # Add sample employees and departments
    with open('seed-data/users_seed.csv', 'rb') as ride_data:
        reader = csv.reader(ride_data,
                            quotechar="'",
                            delimiter=',',
                            quoting=csv.QUOTE_ALL,
                            skipinitialspace=True)
        reader.next()
        for row in reader:
            user = User(user_id=row[0],
                        fb_userid=row[1],
                        first_name=row[2],
                        last_name=row[3],
                        email=row[4],
                        image=row[5])
            db.session.add(user)
            db.session.commit()

    with open('seed-data/rides_seed.csv', 'rb') as ride_data:

        reader = csv.reader(ride_data,
                            quotechar="'",
                            delimiter=',',
                            quoting=csv.QUOTE_ALL,
                            skipinitialspace=True)
        reader.next()
        gmaps = googlemaps.Client(key=GOOGLE_KEY)

        geocode = defaultdict(defaultdict)

        for row in reader:
            route = row[15]

            if not geocode[route]:

                start_lat = row[3]
                start_lng = row[4]
                end_lat = row[5]
                end_lng = row[6]
                time.sleep(1)
                g_start = geocoder.google('{}, {}'.format(
                    start_lat, start_lng))
                time.sleep(1)
                g_end = geocoder.google('{}, {}'.format(end_lat, end_lng))

                geocode[route]['start_lat'] = start_lat
                geocode[route]['start_lng'] = start_lng
                geocode[route]['start_number'] = g_start.housenumber
                geocode[route]['start_street'] = g_start.street
                geocode[route]['start_city'] = g_start.city
                geocode[route]['start_state'] = g_start.state
                geocode[route]['start_zip'] = g_start.postal

                geocode[route]['end_lat'] = end_lat
                geocode[route]['end_lng'] = end_lng
                geocode[route]['end_number'] = g_end.housenumber
                geocode[route]['end_street'] = g_end.street
                geocode[route]['end_city'] = g_end.city
                geocode[route]['end_state'] = g_end.state
                geocode[route]['end_zip'] = g_end.postal

                start_time = datetime.strptime('4:00 PM', '%I:%M %p')
                today = datetime.now().date()
                start_datetime = datetime.combine(
                    datetime.now().date() + timedelta(days=1),
                    start_time.time())

                tz = state_to_timezone(geocode[route]['start_state'])
                start_time_aware = pytz.timezone(tz).localize(start_datetime)

                try:
                    directions_result = gmaps.directions(
                        "{},{}".format(start_lat, start_lng),
                        "{},{}".format(end_lat, end_lng),
                        traffic_model='best_guess',
                        departure_time=start_time_aware)

                    geocode[route]['duration'] = directions_result[0]['legs'][
                        0]['duration']['text']

                    geocode[route]['mileage'] = directions_result[0]['legs'][
                        0]['distance']['text']

                    print '\n\nduration: {}, mileage: {}\n\n'.format(
                        geocode[route]['duration'], geocode[route]['mileage'])
                except Exception, e:
                    print '\n\nDuration/Mileage API Failed\n\n'
                    geocode[route]['mileage'] = None
                    geocode[route]['duration'] = None
                    print "Unexpected error:", start_lat, start_lng, end_lat, end_lng
                    print str(e)

            start_time = datetime.strptime(row[7], '%I:%M %p')
            today = datetime.now().date()
            day_offset = int(row[14])
            start_datetime = datetime.combine(
                datetime.now().date() + timedelta(days=day_offset),
                start_time.time())

            tz = state_to_timezone(geocode[route]['start_state'])
            # localize to US/Pacific
            start_time_aware = pytz.timezone(tz).localize(start_datetime)

            # Normalize to UTC
            start_time_utc = pytz.utc.normalize(start_time_aware)

            ride = Ride(
                driver=row[0],
                seats=row[1],
                cost=row[2],

                # Start Location
                start_lat=geocode[route]['start_lat'],
                start_lng=geocode[route]['start_lng'],
                start_number=geocode[route]['start_number'],
                start_street=geocode[route]['start_street'],
                start_city=geocode[route]['start_city'],
                start_state=geocode[route]['start_state'],
                start_zip=geocode[route]['start_zip'],
                # End Location
                end_lat=geocode[route]['end_lat'],
                end_lng=geocode[route]['end_lng'],
                end_number=geocode[route]['end_number'],
                end_street=geocode[route]['end_street'],
                end_city=geocode[route]['end_city'],
                end_state=geocode[route]['end_state'],
                end_zip=geocode[route]['end_zip'],

                # Date/Time
                start_timestamp=start_time_utc,

                #Details
                car_type=row[9],
                luggage=row[10],
                comments=row[11],
                pickup_window=row[12],
                detour=row[13],
                mileage=geocode[route]['mileage'],
                duration=geocode[route]['duration'])

            db.session.add(ride)
            db.session.commit()
        print geocode
Example #52
    def update_recurring_events(self, updates, original, update_method):
        historic, past, future = self.get_recurring_timeline(original)

        # Determine if the selected event is the first one, if so then
        # act as if we're changing future events
        if len(historic) == 0 and len(past) == 0:
            update_method = UPDATE_FUTURE

        if update_method == UPDATE_FUTURE:
            new_series = [original] + future
        else:
            new_series = past + [original] + future

        # Release the Lock on the selected Event
        remove_lock_information(updates)

        # Get the timezone from the original Event (as the series was created with that timezone in mind)
        timezone = original['dates']['tz']

        # First find the hour and minute of the start date in local time
        start_time = utc_to_local(timezone, updates['dates']['start']).time()

        # Next convert that to seconds since midnight (which gives us a timedelta instance)
        delta_since_midnight = datetime.combine(date.min,
                                                start_time) - datetime.min

        # And calculate the new duration of the events
        duration = updates['dates']['end'] - updates['dates']['start']

        for event in new_series:
            if not event.get(config.ID_FIELD):
                continue

            new_updates = {'dates': deepcopy(event['dates'])} \
                if event.get(config.ID_FIELD) != original.get(config.ID_FIELD) else updates

            # Calculate midnight in local time for this occurrence
            start_of_day_local = utc_to_local(timezone, event['dates']['start'])\
                .replace(hour=0, minute=0, second=0)

            # Then convert midnight in local time to UTC
            start_date_time = local_to_utc(timezone, start_of_day_local)

            # Finally add the delta since midnight
            start_date_time += delta_since_midnight

            # Set the new start and end times
            new_updates['dates']['start'] = start_date_time
            new_updates['dates']['end'] = start_date_time + duration

            if event.get(TO_BE_CONFIRMED_FIELD):
                new_updates[TO_BE_CONFIRMED_FIELD] = False

            # Set '_planning_schedule' on the Event item
            self.set_planning_schedule(new_updates)

            if event.get(config.ID_FIELD) != original.get(config.ID_FIELD):
                new_updates['skip_on_update'] = True
                self.patch(event[config.ID_FIELD], new_updates)
                app.on_updated_events_update_time(
                    new_updates, {'_id': event[config.ID_FIELD]})
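The datetime.combine(date.min, t) - datetime.min idiom above converts a time-of-day into a timedelta that can then be added to any midnight; standalone:

from datetime import date, datetime, time

t = time(9, 30)
delta_since_midnight = datetime.combine(date.min, t) - datetime.min
print(delta_since_midnight)  # 9:30:00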
Example #53
def iso_today_starttime():
    today = date.today()
    t = time(0, 0, 0)
    starttime = datetime.combine(today, t).replace(tzinfo=tzlocal())
    return starttime.isoformat()
Example #54
    def isWithinDateRange(self, date):
        start = datetime.combine(self.DATE_FROM, datetime.min.time())
        end = datetime.combine(self.DATE_TO, datetime.min.time())
        return start < date < end
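Note that both bounds use datetime.min.time(), so the whole of DATE_TO is excluded (any moment on that day fails date < end). If the end date is meant to be inclusive, a hedged variant:

from datetime import date, datetime, time

DATE_FROM, DATE_TO = date(2024, 5, 1), date(2024, 5, 3)

def is_within(dt):
    start = datetime.combine(DATE_FROM, datetime.min.time())
    end = datetime.combine(DATE_TO, time.max)  # include the whole final day
    return start <= dt <= end

print(is_within(datetime(2024, 5, 3, 18, 0)))  # True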
Example #55
    def _convert_date(self, d):
        d = datetime.combine(d, time(hour=0, minute=0))
        d = timezone.make_aware(d, timezone.get_current_timezone())
        return d
Example #56
def iso_today_endtime():
    today = date.today()
    t = time(23, 59, 59)
    endtime = datetime.combine(today, t).replace(tzinfo=tzlocal())
    return endtime.isoformat()
Example #57
    def seconds(self):
        current = datetime.combine(self.day.date, self.time)
        start = datetime.combine(self.day.date, datetime.min.time())
        return (current - start).seconds
Example #58
    def interval_available_slots(self, start, end, offset, online=False):
        self.ensure_one()
        slots = {}
        # offset is 0 if slots request comes from online calendar

        # datetime.now() is in UTC inside odoo
        now = datetime.now() \
            - timedelta(minutes=SLOT_START_DELAY_MINS) \
            - timedelta(minutes=offset) \
            + timedelta(hours=1)
        now = now.replace(minute=0, second=0, microsecond=0)
        start_dt = datetime.strptime(start, DTF) - timedelta(minutes=offset)
        is_current_week = False
        if start_dt < now:
            is_current_week = True
            start_dt = now
        end_dt = datetime.strptime(end, DTF) - timedelta(minutes=offset)
        if online:
            # The online fullcalendar is initialized with timezone=false, while
            # the backend calendar uses the 'local' timezone by default.
            # The difference: the backend calendar takes slots in UTC and
            # localizes them (using the system timezone settings), whereas the
            # online calendar does not localize slots and expects them already
            # in the venue's timezone.
            # For online we therefore shift the start so that, after
            # localization, slots run from 00:00 for future weeks and from
            # "now" for the current week (online slots are localized before
            # being placed on the calendar, in self.generate_slot).
            if not is_current_week:
                start_dt = start_dt - timedelta(
                    minutes=self.venue_id.tz_offset)
            end_dt = end_dt - timedelta(minutes=self.venue_id.tz_offset)

        if online and self.hours_to_prepare:
            online_min_dt = now + timedelta(hours=self.hours_to_prepare)
            start_dt = start_dt if start_dt > online_min_dt else online_min_dt

        if online and self.allowed_days_interval:
            online_max_dt = now + timedelta(days=self.allowed_days_interval)
            end_dt = end_dt if end_dt < online_max_dt else online_max_dt

        while start_dt < end_dt:
            start_d = online and \
                (start_dt + timedelta(minutes=self.venue_id.tz_offset)).date() or \
                start_dt.date()

            if not self.work_on_holidays and self.holidays_country_id:
                holidays = self.env['hr.holidays.public'].search([
                    ('country_id', '=', self.holidays_country_id.id),
                    ('year', '=', start_d.year),
                ], limit=1)
                # Guard against an empty recordset before indexing into it
                if holidays and holidays[0].line_ids.filtered(
                        lambda r: r.date == start_d.strftime(DF)):
                    start_dt += timedelta(1)

            if self.calendar_id:
                for attendance in self.calendar_id.get_attendances_for_weekdays(
                    [start_d.weekday()])[0]:
                    min_from = int(
                        (attendance.hour_from - int(attendance.hour_from)) *
                        60)
                    min_to = int(
                        (attendance.hour_to - int(attendance.hour_to)) * 60)

                    x = datetime.combine(
                        start_d,
                        datetime.min.time().replace(hour=int(
                            attendance.hour_from),
                                                    minute=min_from))
                    # let fullcalendar event key be in UTC timezone like it is for non-calendar online slots
                    event_key = online and (x - timedelta(minutes=self.venue_id.tz_offset)).strftime(DTF) or \
                        x.strftime(DTF)
                    if attendance.hour_to == 0:
                        y = datetime.combine(
                            start_d, datetime.min.time()) + timedelta(1)
                    else:
                        y = datetime.combine(
                            start_d,
                            datetime.min.time().replace(hour=int(
                                attendance.hour_to),
                                                        minute=min_to))
                    if self.has_slot_calendar and \
                            (not online and x >= now or online and x >= now + timedelta(minutes=self.venue_id.tz_offset)) \
                            and x >= start_dt and y <= end_dt:
                        slots[event_key] = \
                            self.generate_slot(x, y, online=online, offset=offset, calendar=True)
                    elif not self.has_slot_calendar:
                        while x < y:
                            if not online and x >= now or online and x >= now + timedelta(
                                    minutes=self.venue_id.tz_offset):
                                # Re-derive the key from the current sub-slot start;
                                # reusing the attendance-level key would make every
                                # sub-slot overwrite the same dict entry
                                event_key = online and \
                                    (x - timedelta(minutes=self.venue_id.tz_offset)).strftime(DTF) or \
                                    x.strftime(DTF)
                                slots[event_key] = \
                                    self.generate_slot(x,
                                                       x + timedelta(minutes=SLOT_DURATION_MINS),
                                                       online=online,
                                                       offset=offset,
                                                       calendar=True)
                            x += timedelta(minutes=SLOT_DURATION_MINS)
                start_dt += timedelta(1)
                start_dt = start_dt.replace(hour=0, minute=0, second=0)
            else:
                slots[start_dt.strftime(DTF)] = self.generate_slot(
                    start_dt,
                    start_dt + timedelta(minutes=SLOT_DURATION_MINS),
                    online=online,
                    offset=offset)
                start_dt += timedelta(minutes=SLOT_DURATION_MINS)

        return slots
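Odoo-style calendars store attendance hours as floats (9.5 means 09:30); the float-to-datetime conversion used above, in isolation:

from datetime import date, datetime

def slot_start(day, hour_from):
    minutes = int((hour_from - int(hour_from)) * 60)
    return datetime.combine(
        day, datetime.min.time().replace(hour=int(hour_from), minute=minutes))

print(slot_start(date(2024, 5, 3), 9.5))  # 2024-05-03 09:30:00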
Example #59
def search(request, template_name="invoices/search.html"):
    start_amount = None
    end_amount = None
    tendered = None
    balance = None
    last_name = None
    start_dt = None
    end_dt = None
    search_criteria = None
    search_text = None
    search_method = None
    invoice_type = u''
    event = None
    event_id = None

    form = InvoiceSearchForm(request.GET)

    if form.is_valid():
        start_dt = form.cleaned_data.get('start_dt')
        end_dt = form.cleaned_data.get('end_dt')
        start_amount = form.cleaned_data.get('start_amount')
        end_amount = form.cleaned_data.get('end_amount')
        tendered = form.cleaned_data.get('tendered')
        balance = form.cleaned_data.get('balance')
        last_name = form.cleaned_data.get('last_name')
        search_criteria = form.cleaned_data.get('search_criteria')
        search_text = form.cleaned_data.get('search_text')
        search_method = form.cleaned_data.get('search_method')
        invoice_type = form.cleaned_data.get('invoice_type')
        event = form.cleaned_data.get('event')
        event_id = form.cleaned_data.get('event_id')

    if tendered:
        if 'void' in tendered:
            invoices = Invoice.objects.void()
        else:
            invoices = Invoice.objects.filter(status_detail=tendered)
    else:
        invoices = Invoice.objects.all()

    if start_dt:
        invoices = invoices.filter(
            create_dt__gte=datetime.combine(start_dt, time.min))
    if end_dt:
        invoices = invoices.filter(
            create_dt__lte=datetime.combine(end_dt, time.max))

    if start_amount:
        invoices = invoices.filter(total__gte=start_amount)
    if end_amount:
        invoices = invoices.filter(total__lte=end_amount)

    if balance == '0':
        invoices = invoices.filter(balance=0)
    elif balance == '1':
        invoices = invoices.filter(balance__gt=0)

    if last_name:
        invoices = invoices.filter(bill_to_last_name__iexact=last_name)

    owner = None
    if search_criteria and search_text:
        if search_criteria == 'owner_id':
            search_criteria = 'owner__id'
            try:
                search_text = int(search_text)
                [owner] = User.objects.filter(id=search_text)[:1] or [None]
            except (ValueError, TypeError):
                search_text = 0

        if search_method == 'starts_with':
            if isinstance(search_text, str):
                search_type = '__istartswith'
            else:
                search_type = '__startswith'
        elif search_method == 'contains':
            if isinstance(search_text, str):
                search_type = '__icontains'
            else:
                search_type = '__contains'
        else:
            if isinstance(search_text, str):
                search_type = '__iexact'
            else:
                search_type = '__exact'

        if all(
            [search_criteria == 'owner__id', search_method == 'exact', owner]):
            invoices = invoices.filter(
                Q(bill_to_email__iexact=owner.email)
                | Q(owner_id=owner.id))
        else:
            search_filter = {
                '%s%s' % (search_criteria, search_type): search_text
            }
            invoices = invoices.filter(**search_filter)

    if invoice_type:
        content_type = ContentType.objects.filter(app_label=invoice_type)
        invoices = invoices.filter(object_type__in=content_type)
        if invoice_type == 'events':
            # Set event filters
            event_set = set()
            if event:
                event_set.add(event.pk)
            if event_id:
                event_set.add(event_id)
            if event or event_id:
                invoices = invoices.filter(
                    registration__event__pk__in=event_set)

    if request.user.profile.is_superuser or has_perm(request.user,
                                                     'invoices.view_invoice'):
        invoices = invoices.order_by('-create_dt')
    else:
        invoices = invoices.filter(
            Q(creator=request.user) | Q(owner=request.user)
            | Q(bill_to_email__iexact=request.user.email)).order_by(
                '-create_dt')
    EventLog.objects.log()
    return render_to_resp(request=request,
                          template_name=template_name,
                          context={
                              'invoices': invoices,
                              'form': form,
                          })
Example #60
def workhoursummary(month, year, page, row):

    try:
        lst = []

        day = timedelta(days=1)
        date1 = datetime(int(year), int(month), 1)
        d = date1
        dates = []
        while d.month == int(month):
            dates.append(d.strftime('%d-%m-%Y'))
            d += day

        user_list = User.query.filter_by(isActive=1)

        user_list = user_list.order_by(desc(User.id)).paginate(
            int(page), int(row), False).items

        if user_list:

            for user in user_list:

                current_month_work_hrs = 0

                data = {}
                data['user_id'] = user.id
                data['user_name'] = (user.rkp_name if user.rkp_name
                                     else user.first_name + (user.last_name or ""))
                data['user_location'] = user.location
                data['start_of_employment'] = (
                    "" if not user.employment_date
                    else user.employment_date.strftime('%d-%m-%Y'))
                data['accumulated_work_hrs'] = 0


                working_hrs_data = RKPLogin.query\
                                .filter(extract('month', RKPLogin.start_date) == int(month))\
                                .filter(extract('year', RKPLogin.start_date) == int(year)) \
                                .filter(extract('month', RKPLogin.stoped_date) == int(month)) \
                                .filter(extract('year', RKPLogin.stoped_date) == int(year)) \
                                .filter_by(user_id=user.id, rt_status=2, isActive=1).all()

                working_hrs_data_lst = []

                if working_hrs_data:

                    for day in dates:
                        day = datetime.strptime(day, "%d-%m-%Y")

                        last_hrs = datetime.combine(day, time.max)
                        first_hrs = datetime.combine(day, time.min)

                        for hrs_data in working_hrs_data:
                            start_date = hrs_data.start_date
                            stoped_date = hrs_data.stoped_date

                            mins_val = 0

                            if start_date.date() <= day.date() <= stoped_date.date():

                                # interval starts and ends on this day
                                if start_date.date() == day.date() and stoped_date.date() == day.date():
                                    difference_date = stoped_date - start_date
                                    mins_val = divmod(difference_date.total_seconds(), 60)[0]

                                # interval starts on this day and ends later
                                elif start_date.date() == day.date():
                                    difference_date = last_hrs - start_date
                                    mins_val = divmod(difference_date.total_seconds(), 60)[0]

                                # interval started earlier and ends on this day
                                elif stoped_date.date() == day.date():
                                    difference_date = stoped_date - first_hrs
                                    mins_val = divmod(difference_date.total_seconds(), 60)[0]

                                else:
                                    mins = 1440
                                    mins_val = mins

                                obj_already_exists = False
                                if working_hrs_data_lst:

                                    for obj in working_hrs_data_lst:
                                        if obj['date'] == day.strftime(
                                                '%d-%m-%Y'):
                                            obj_already_exists = True
                                            mins_data = obj['value'] if obj[
                                                'value'] != '' else '0:00'
                                            val_diff = datetime.strptime(
                                                mins_data,
                                                "%H:%M") - datetime.strptime(
                                                    "0:00", "%H:%M")
                                            val_mins = divmod(
                                                val_diff.total_seconds(), 60)
                                            mins_data = int(val_mins[0])
                                            tot_mins = mins_data + mins_val
                                            obj['value'] = "{}:{}".format(
                                                *divmod(int(tot_mins), 60))

                                if not obj_already_exists:
                                    date_arr = {}
                                    date_arr['date'] = day.strftime('%d-%m-%Y')
                                    date_arr['value'] = "{}:{}".format(
                                        *divmod(int(mins_val), 60))
                                    working_hrs_data_lst.append(date_arr)

                                current_month_work_hrs = current_month_work_hrs + mins_val

                            else:
                                obj_already_exists = False
                                if working_hrs_data_lst:
                                    for obj in working_hrs_data_lst:
                                        if obj['date'] == day.strftime(
                                                '%d-%m-%Y'):
                                            obj_already_exists = True

                                if not obj_already_exists:
                                    date_arr = {}
                                    date_arr['date'] = day.strftime('%d-%m-%Y')
                                    date_arr['value'] = ""
                                    working_hrs_data_lst.append(date_arr)
                else:
                    for day in dates:
                        day = datetime.strptime(day, "%d-%m-%Y")
                        date_arr = {}
                        date_arr['date'] = day.strftime('%d-%m-%Y')
                        date_arr['value'] = ""
                        working_hrs_data_lst.append(date_arr)

                data['working_hrs_lst'] = working_hrs_data_lst
                data['current_month_work_hrs'] = "{}:{}".format(
                    *divmod(int(current_month_work_hrs), 60))
                data['total_work_hrs'] = "{}:{}".format(
                    *divmod(int(current_month_work_hrs), 60))
                lst.append(data)

        response_object = {
            "ErrorCode": "9999",
            "summary_data": lst,
        }

        return response_object
    except Exception as e:
        logging.exception(e)
        response_object = {"message": str(e), "ErrorCode": "0000", "data": []}
        return response_object
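The per-day split above clamps each interval to a day's bounds with time.min and time.max; the same overlap as a compact sketch (like the original's last_hrs/first_hrs clamping, the time.max bound drops the final fraction of a minute at a day edge):

from datetime import datetime, time

def minutes_on_day(start, stop, day):
    day_start = datetime.combine(day, time.min)
    day_end = datetime.combine(day, time.max)
    lo, hi = max(start, day_start), min(stop, day_end)
    return max((hi - lo).total_seconds() // 60, 0)

print(minutes_on_day(datetime(2024, 5, 3, 22, 0), datetime(2024, 5, 4, 6, 0),
                     datetime(2024, 5, 4)))  # 360.0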