def _test_file_time_getter_tz_handling_off(self, getter):
    # Django's TZ (and hence the system TZ) is set to Africa/Algiers which
    # is UTC+1 and has no DST change. We can set the Django TZ to something
    # else so that UTC, Django's TIME_ZONE, and the system timezone are all
    # different.
    now_in_algiers = timezone.make_aware(datetime.now())

    with timezone.override(timezone.get_fixed_timezone(-300)):
        # At this point the system TZ is +1 and the Django TZ
        # is -5.
        self.assertFalse(self.storage.exists('test.file.tz.off'))

        f = ContentFile('custom contents')
        f_name = self.storage.save('test.file.tz.off', f)
        self.addCleanup(self.storage.delete, f_name)
        dt = getter(f_name)
        # dt should be naive, in system (+1) TZ
        self.assertTrue(timezone.is_naive(dt))

        # The three timezones are indeed distinct.
        naive_now = datetime.now()
        algiers_offset = now_in_algiers.tzinfo.utcoffset(naive_now)
        django_offset = timezone.get_current_timezone().utcoffset(naive_now)
        utc_offset = timezone.utc.utcoffset(naive_now)
        self.assertGreater(algiers_offset, utc_offset)
        self.assertLess(django_offset, utc_offset)

        # dt and naive_now should be the same effective time.
        self.assertLess(abs(dt - naive_now), timedelta(seconds=2))
        # If we convert dt to an aware object using the Algiers
        # timezone then it should be the same effective time to
        # now_in_algiers.
        _dt = timezone.make_aware(dt, now_in_algiers.tzinfo)
        self.assertLess(abs(_dt - now_in_algiers), timedelta(seconds=2))
def email_daily_ratings():
    """
    Send the email for yesterday's ratings (right after the day has passed).

    Sends one email per app containing all of that day's reviews.
    """
    if not waffle.switch_is_active('ratings'):
        return

    dt = datetime.datetime.today() - datetime.timedelta(1)
    yesterday = datetime.datetime(dt.year, dt.month, dt.day, 0, 0, 0)
    today = yesterday + datetime.timedelta(1)
    pretty_date = '%04d-%02d-%02d' % (dt.year, dt.month, dt.day)

    yesterday_reviews = Review.objects.filter(created__gte=yesterday,
                                              created__lt=today,
                                              addon__type=amo.ADDON_WEBAPP)

    # For each app in yesterday's set of reviews, gather reviews and email out.
    apps = set(review.addon for review in yesterday_reviews)
    for app in apps:
        # Email all reviews for the current app in one email.
        author_emails = app.authors.values_list('email', flat=True)
        subject = 'Firefox Marketplace reviews for %s on %s' % (app.name,
                                                                pretty_date)
        context = {'reviews': (yesterday_reviews.filter(addon=app).
                               order_by('-created')),
                   'base_url': settings.SITE_URL,
                   'pretty_date': pretty_date}
        send_mail_jinja(subject, 'ratings/emails/daily_digest.html',
                        context, recipient_list=author_emails,
                        perm_setting='app_new_review')
def setUp(self):
    super(GroupEventsOldestTest, self).setUp()
    self.login_as(user=self.user)
    project = self.create_project()
    min_ago = (timezone.now() - timedelta(minutes=1)).isoformat()[:19]
    two_min_ago = (timezone.now() - timedelta(minutes=2)).isoformat()[:19]

    self.event1 = self.store_event(
        data={
            'event_id': 'a' * 32,
            'environment': 'staging',
            'fingerprint': ['group_1'],
            'timestamp': two_min_ago,
        },
        project_id=project.id,
    )
    self.event2 = self.store_event(
        data={
            'event_id': 'b' * 32,
            'environment': 'production',
            'fingerprint': ['group_1'],
            'timestamp': min_ago,
        },
        project_id=project.id,
    )

    self.group = Group.objects.first()
def _doApiRequest(self, type, append=""):
    type_to_update_interval = {
        'playerData': timedelta(days=7),
        'alliances': timedelta(days=1),
        'players': timedelta(days=1),
        'highscore': timedelta(hours=1),
    }
    api_data = self.cache.lookup(self.server+"_"+type+append)
    need_download = False
    if not api_data:
        logger.info("Need download because %s is not cached"
                    % (self.server+"_"+type+append))
        need_download = True
    else:
        try:
            # exception when response is "player not found"
            timestamp = int(textextract(api_data, 'timestamp="', '"'))
        except:
            timestamp = os.path.getmtime(self.cache.get_path(self.server+"_"+type+append))
        timestamp = datetime.fromtimestamp(timestamp)
        if timestamp + type_to_update_interval[type] < datetime.now():
            logger.info("Need download because %s is older than its update interval"
                        % (self.server+"_"+type))
            need_download = True
    if need_download:
        r = self.requests.get('http://'+self.server+'/api/'+type+'.xml'+append)
        self.cache.write(self.server+"_"+type+append, r.text)
        api_data = r.text
    return (need_download, type, append), api_data
def generate_inventory_data(self, inventories, is_online=1):
    start_day = datetime.date.today()
    end_day = start_day + datetime.timedelta(days=89)
    manual_confirm_count = []
    if is_online == 1:
        day = start_day
        while day <= end_day:
            month = int("{}{:0>2d}".format(day.year, day.month))
            for inventory in inventories:
                if inventory.month == month:
                    manual_confirm_count.append(str(inventory.get_day(day.day)))
                    break
            day = day + datetime.timedelta(days=1)
    else:
        #manual_confirm_count = ['0' for i in range(90)]
        manual_confirm_count = '0' * 90

    data = {}
    data['chain_id'] = CHAIN_ID
    if inventories[0].merchant_id in SPEC_STOCK_PUSH:
        data['ota_id'] = SPEC_STOCK_PUSH[inventories[0].merchant_id]
    else:
        data['ota_id'] = 0
    data['hotel_id'] = str(inventories[0].hotel_id)
    data['room_type_id'] = str(inventories[0].roomtype_id)
    data['manual_confirm_counts'] = '|'.join(manual_confirm_count)
    data['start_date'] = start_day
    data['end_date'] = end_day
    data['is_soldout_auto_close'] = 1
    return data
def search(self, serie, numero, remitente, destinatario, sucursal, fecha):
    flat = serie or numero or remitente or destinatario or fecha
    tz = timezone.get_current_timezone()
    if flat:
        if fecha:
            # a date was entered
            date = datetime.strptime(fecha, "%d/%m/%Y")
            end_date = timezone.make_aware(date, tz)
            start_date = end_date - timedelta(days=7)
        else:
            date = datetime.strptime("01/10/2015", "%d/%m/%Y")
            end_date = timezone.now()
            start_date = timezone.make_aware(date, tz)
    else:
        end_date = timezone.now()
        start_date = end_date - timedelta(days=7)
    busqueda = self.annotate(
        saldo=F('depositslip__total_amount') - F('amount')
    ).filter(
        depositslip__serie__icontains=serie,
        depositslip__number__icontains=numero,
        depositslip__sender__full_name__icontains=remitente,
        depositslip__addressee__full_name__icontains=destinatario,
        depositslip__state='2',
        depositslip__destination=sucursal,
        depositslip__created__range=(start_date, end_date)
    )
    return busqueda
def test_external_task_sensor_fn(self):
    self.test_time_sensor()
    # check that the execution_fn works
    t = ExternalTaskSensor(
        task_id='test_external_task_sensor_check_delta',
        external_dag_id=TEST_DAG_ID,
        external_task_id=TEST_TASK_ID,
        execution_date_fn=lambda dt: dt + timedelta(0),
        allowed_states=['success'],
        dag=self.dag
    )
    t.run(
        start_date=DEFAULT_DATE,
        end_date=DEFAULT_DATE,
        ignore_ti_state=True
    )
    # double check that the execution is being called by failing the test
    t2 = ExternalTaskSensor(
        task_id='test_external_task_sensor_check_delta',
        external_dag_id=TEST_DAG_ID,
        external_task_id=TEST_TASK_ID,
        execution_date_fn=lambda dt: dt + timedelta(days=1),
        allowed_states=['success'],
        timeout=1,
        poke_interval=1,
        dag=self.dag
    )
    with self.assertRaises(exceptions.AirflowSensorTimeout):
        t2.run(
            start_date=DEFAULT_DATE,
            end_date=DEFAULT_DATE,
            ignore_ti_state=True
        )
def get_chart_data(self, instance, max_days=90):
    if hasattr(instance, '_state'):
        db = instance._state.db
    else:
        db = 'default'

    if not has_charts(db):
        return []

    hours = max_days * 24

    today = datetime.datetime.now().replace(microsecond=0, second=0, minute=0)
    min_date = today - datetime.timedelta(hours=hours)

    method = get_sql_date_trunc('date', db)

    chart_qs = list(instance.messagecountbyminute_set
                    .filter(date__gte=min_date)
                    .extra(select={'grouper': method}).values('grouper')
                    .annotate(num=Sum('times_seen')).values_list('grouper', 'num')
                    .order_by('grouper'))

    if not chart_qs:
        return []

    rows = dict(chart_qs)

    # just skip zeroes
    first_seen = hours
    while not rows.get(today - datetime.timedelta(hours=first_seen)) and first_seen > 24:
        first_seen -= 1

    return [rows.get(today - datetime.timedelta(hours=d), 0)
            for d in xrange(first_seen, -1, -1)]
def test_go_live_page_will_be_published(self):
    # Connect a mock signal handler to page_published signal
    signal_fired = [False]
    signal_page = [None]

    def page_published_handler(sender, instance, **kwargs):
        signal_fired[0] = True
        signal_page[0] = instance

    page_published.connect(page_published_handler)

    page = SimplePage(
        title="Hello world!",
        slug="hello-world",
        live=False,
        go_live_at=timezone.now() - timedelta(days=1),
    )
    self.root_page.add_child(instance=page)

    page.save_revision(approved_go_live_at=timezone.now() - timedelta(days=1))

    p = Page.objects.get(slug='hello-world')
    self.assertFalse(p.live)
    self.assertTrue(PageRevision.objects.filter(page=p).exclude(approved_go_live_at__isnull=True).exists())

    management.call_command('publish_scheduled_pages')

    p = Page.objects.get(slug='hello-world')
    self.assertTrue(p.live)
    self.assertFalse(PageRevision.objects.filter(page=p).exclude(approved_go_live_at__isnull=True).exists())

    # Check that the page_published signal was fired
    self.assertTrue(signal_fired[0])
    self.assertEqual(signal_page[0], page)
    self.assertEqual(signal_page[0], signal_page[0].specific)
def HolidayDateByYear(year):
    holidays = {str(year) + "-01-01": "New Year's Day",
                str(year) + "-12-24": "Christmas Eve",
                str(year) + "-12-25": "Christmas Day",
                str(year) + "-12-31": "New Year's Eve",
                str(year) + "-07-04": "Independence Day"}
    for k in holiday_formulas:
        temp_date = None
        if holiday_formulas[k][0] < 0:
            if holiday_formulas[k][2] > 11:
                days = 31
            else:
                days = ((date(int(year), holiday_formulas[k][2] + 1, 1) -
                         date(int(year), holiday_formulas[k][2], 1))).days
            temp_date = date(int(year), holiday_formulas[k][2], days)
            delta = -timedelta(days=1)
        else:
            start_day = (7 * holiday_formulas[k][0]) + 1
            if start_day == 0:
                start_day = 1
            temp_date = date(int(year), holiday_formulas[k][2], start_day)
            delta = timedelta(days=1)
        while not temp_date.strftime("%A") == holiday_formulas[k][1]:
            temp_date = temp_date + delta
        if not temp_date.month == holiday_formulas[k][2]:
            continue
        if len(holiday_formulas[k]) > 3:
            temp_date = temp_date + timedelta(days=holiday_formulas[k][3])
        holidays[str(year) + "-" + str(holiday_formulas[k][2]).zfill(2) + "-" +
                 str(temp_date.day).zfill(2)] = k
    return holidays
def gen_datetime(min_date=None, max_date=None):
    """Returns a random datetime value

    :param min_date: A valid ``datetime.datetime`` object.
    :param max_date: A valid ``datetime.datetime`` object.
    :raises: ``ValueError`` if arguments are not valid ``datetime.datetime``
        objects.
    :returns: Random ``datetime.datetime`` object.

    """
    _min_value = (datetime.datetime.now() -
                  datetime.timedelta(365 * MIN_YEARS))
    _max_value = (datetime.datetime.now() +
                  datetime.timedelta(365 * MAX_YEARS))

    if min_date is None:
        min_date = _min_value
    if max_date is None:
        max_date = _max_value

    # Validation
    if not isinstance(min_date, datetime.datetime):
        raise ValueError("%s is not a valid datetime.datetime object" % min_date)
    if not isinstance(max_date, datetime.datetime):
        raise ValueError("%s is not a valid datetime.datetime object" % max_date)

    # Check that max_date is not before min_date
    assert min_date < max_date

    # Pick a time between min and max dates
    diff = max_date - min_date
    seconds = random.randint(0, diff.days * 3600 * 24 + diff.seconds)

    return min_date + datetime.timedelta(seconds=seconds)
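# Usage sketch for gen_datetime above (added for illustration; it assumes the
# function and its MIN_YEARS/MAX_YEARS constants live in an importable module,
# here called gen_utils -- that module name is hypothetical).
import datetime

from gen_utils import gen_datetime  # hypothetical module name

random_dt = gen_datetime()  # anywhere inside the default MIN_YEARS/MAX_YEARS window
recent_dt = gen_datetime(
    min_date=datetime.datetime.now() - datetime.timedelta(days=30),
    max_date=datetime.datetime.now(),
)  # a random datetime from the last 30 days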
def get_expired_reporters(cls, today):
    # we'll check anybody who hasn't been seen in between 15 and 45 days
    expired_start = today - datetime.timedelta(45)
    expired_end = today - datetime.timedelta(15)

    # reporter reminder type
    expired_type = ReminderType.objects.get(name="Inactive Reporter")

    inactive = [r for r in Reporter.objects.all() if r.is_expired()]

    reporters = set()
    for reporter in inactive:
        # get our most recent reminder
        reminders = Reminder.objects.filter(reporter=reporter,
                                            type=expired_type).order_by('-date')

        # we've had a previous reminder
        if reminders:
            last_reminder = reminders[0]

            # if we were last seen before the reminder, we've already been reminded, skip over
            try:
                if reporter.last_seen() < last_reminder.date:
                    continue
            except:
                pass

        # otherwise, pop this reporter on
        reporters.add(reporter)

    return reporters
def daterange(start_date, end_date):
    if start_date <= end_date:
        for n in range((end_date - start_date).days + 1):
            yield start_date + datetime.timedelta(n)
    else:
        for n in range((start_date - end_date).days + 1):
            yield start_date - datetime.timedelta(n)
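# Usage sketch for daterange above (added for illustration; assumes the generator
# is importable from its module): it walks day by day, inclusive of both endpoints,
# forwards or backwards depending on argument order.
import datetime

start = datetime.date(2021, 1, 1)
end = datetime.date(2021, 1, 3)
assert list(daterange(start, end)) == [datetime.date(2021, 1, 1),
                                       datetime.date(2021, 1, 2),
                                       datetime.date(2021, 1, 3)]
assert list(daterange(end, start)) == [datetime.date(2021, 1, 3),
                                       datetime.date(2021, 1, 2),
                                       datetime.date(2021, 1, 1)]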
def test_08_publishing(self):
    self.create_default_page_set()

    page = Page.objects.get(pk=1)
    page2 = Page.objects.get(pk=2)
    self.is_published(page.get_absolute_url(), should_be=False)
    self.is_published(page2.get_absolute_url(), should_be=False)

    page.active = True
    page.save()
    page2.active = True
    page2.save()
    self.is_published(page.get_absolute_url(), should_be=True)
    self.is_published(page2.get_absolute_url(), should_be=True)

    old_publication = page.publication_date
    page.publication_date = datetime.now() + timedelta(days=1)
    page.save()
    self.is_published(page.get_absolute_url(), should_be=False)
    # Should not be accessible because of its parent's inactivity
    self.is_published(page2.get_absolute_url(), should_be=False)

    page.publication_date = old_publication
    page.publication_end_date = datetime.now() - timedelta(days=1)
    page.save()
    self.is_published(page.get_absolute_url(), should_be=False)
    # Should not be accessible because of its parent's inactivity
    self.is_published(page2.get_absolute_url(), should_be=False)

    page.publication_end_date = datetime.now() + timedelta(days=1)
    page.save()
    self.is_published(page.get_absolute_url(), should_be=True)
    self.is_published(page2.get_absolute_url(), should_be=True)
def test_maybe_timedelta(self):
    D = maybe_timedelta

    for i in (30, 30.6):
        self.assertEqual(D(i), timedelta(seconds=i))

    self.assertEqual(D(timedelta(days=2)), timedelta(days=2))
def test_task_due_date(self):
    self.test_add_tasklist()
    today = datetime.datetime.utcnow().replace(tzinfo=utc)

    # 0 Not Due
    not_due_date = today + datetime.timedelta(days=7)
    not_due = TaskModels.Task.objects.create(name="Not Due", tasklist_id=1,
                                             due_date=not_due_date)
    self.assertEquals(not_due.is_due, 0)

    # 1 Due Today
    due_today_date = today + datetime.timedelta(seconds=10)
    due_today = TaskModels.Task.objects.create(name="Due Today", tasklist_id=1,
                                               due_date=due_today_date)
    self.assertEquals(due_today.is_due, 1)

    # -1 Overdue
    overdue_date = today
    overdue = TaskModels.Task.objects.create(name="Overdue", tasklist_id=1,
                                             due_date=overdue_date)
    self.assertEquals(overdue.is_due, -1)
def test_parse_timewindow_now_plus_delta():
    now = datetime.datetime.now(tzutc())
    start, end = parse_timewindow("+ 2 hours")
    assert start >= now
    assert start < now + datetime.timedelta(seconds=5)
    assert end > start
    assert end - start == datetime.timedelta(hours=2)
def _apply_datespan_shifts(self, datespan):
    if datespan and not isinstance(datespan, DateSpan):
        raise ValueError("datespan must be an instance of DateSpan")

    if datespan:
        datespan = copy.copy(datespan)
        now = datetime.datetime.utcnow()
        # make sure we don't go over the current day
        # remember, there is no timezone support for this yet
        if datespan.enddate > now:
            datespan.enddate = now
        datespan.enddate = datespan.enddate.replace(hour=23, minute=59, second=59,
                                                    microsecond=999999)
        if self.fixed_datespan_days:
            datespan.startdate = datespan.enddate - datetime.timedelta(
                days=self.fixed_datespan_days, microseconds=-1)
        if self.fixed_datespan_months:
            # By making the assumption that the end date is always the end of the month
            # the first month's adjustment is accomplished by moving the start date to
            # the beginning of the month. Any additional months are subtracted in the usual way
            start = self.get_first_day_of_month(datespan.enddate.year,
                                                datespan.enddate.month)
            start_year, start_month = add_months(start.year, start.month,
                                                 -(self.fixed_datespan_months - 1))
            datespan.startdate = start.replace(year=start_year, month=start_month)
        if self.startdate_shift:
            datespan.startdate = datespan.startdate + datetime.timedelta(days=self.startdate_shift)
        if self.enddate_shift:
            datespan.enddate = datespan.enddate + datetime.timedelta(days=self.enddate_shift)
    return datespan
def test1(self):
    """ sequence 4 is missed, so we should have straight line interpolation
        between 3 and 5.
    """
    tvd = [(0, 1, 0, 1),
           (3, 2, 1./4, 2),
           (7, 3, -1./13, 3),
           (20, 3, 1./13, 5),  # missed seq 4
           (24, 1, 0, 9)]      # missed 6,7,8
    data = []
    now = datetime(2001, 1, 1)
    for (t, v, d, s) in tvd:
        data.append((now + timedelta(minutes=1*t), v, d, s))
    result = list(PartSplineReconstruct(
        src=SipPhenom(src=data, interval=timedelta(minutes=1)),
        threshold=0.1))
    for ptup in result:
        self.assertTrue(ptup.dashed is not None)
        intvl = int(my_total_seconds((ptup.dt - now)) /
                    my_total_seconds(timedelta(minutes=1)))
        if intvl >= 7 and intvl <= 20:
            self.assertEquals(ptup.sp, 3)
        if intvl > 20 and intvl <= 25:
            self.assertEquals(ptup.sp, (intvl - 20) * (1.-3.)/(24-20) + 3)
    for i, ptup in enumerate(result):
        intvl = int(my_total_seconds((ptup.dt - now)) /
                    my_total_seconds(timedelta(minutes=1)))
        self.assertEquals(i, intvl)
def _parse_datetime_value(value):
    # timezones are not supported and are assumed UTC
    if value[-1] == 'Z':
        value = value[:-1]

    value_len = len(value)
    if value_len in (8, 10):
        value = datetime.strptime(value, '%Y-%m-%d').replace(
            tzinfo=timezone.utc,
        )
        return [value, value + timedelta(days=1)]
    elif value[4] == '-':
        try:
            value = datetime.strptime(value, '%Y-%m-%dT%H:%M:%S').replace(
                tzinfo=timezone.utc,
            )
        except ValueError:
            value = datetime.strptime(value, '%Y-%m-%dT%H:%M:%S.%f').replace(
                tzinfo=timezone.utc,
            )
    else:
        value = datetime.utcfromtimestamp(float(value)).replace(
            tzinfo=timezone.utc,
        )
    return [value - timedelta(minutes=5), value + timedelta(minutes=6)]
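# Worked examples for _parse_datetime_value above (added for illustration, assuming
# the datetime/timedelta/timezone names imported by the surrounding module):
#   '2018-01-01'           -> [2018-01-01 00:00 UTC, 2018-01-02 00:00 UTC]  (a whole-day window)
#   '2018-01-01T12:30:00Z' -> [12:25 UTC, 12:36 UTC]  (5 minutes before, 6 minutes after)
#   '1514809800'           -> the same +/- 5/6 minute window around the Unix timestamp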
def run(self, **kwargs):
    from apps.rss_feeds.models import Feed
    settings.LOG_TO_STREAM = True
    now = datetime.datetime.utcnow()

    # Active feeds
    feeds = Feed.objects.filter(
        next_scheduled_update__lte=now,
        active=True
    ).exclude(
        active_subscribers=0
    ).order_by('?')
    Feed.task_feeds(feeds)

    # Mistakenly inactive feeds
    day = now - datetime.timedelta(days=1)
    feeds = Feed.objects.filter(
        last_update__lte=day,
        queued_date__lte=day,
        min_to_decay__lte=60*24,
        active_subscribers__gte=1
    ).order_by('?')[:20]
    if feeds:
        Feed.task_feeds(feeds)

    week = now - datetime.timedelta(days=7)
    feeds = Feed.objects.filter(
        last_update__lte=week,
        queued_date__lte=day,
        active_subscribers__gte=1
    ).order_by('?')[:20]
    if feeds:
        Feed.task_feeds(feeds)
def read_lock(self, lockfile):
    """
    Read job info from existing lock file.

    Raise an IOError if the lockfile cannot be read
    """
    lock = open(lockfile, 'r')
    temp = pickle.load(lock)
    lock.close()
    logging.debug("Read JobInfo from lock:\n%s", temp)
    self.__dict__ = temp.__dict__
    if self.start_time > 0:
        walltime = int(time.time() - self.start_time)
        self.resources_used['walltime'] \
            = str(datetime.timedelta(seconds=walltime))
    (cput, mem, vmem, threads) = get_cpu_mem_info(self.pid)
    if cput > 0:
        self.resources_used['cput'] \
            = str(datetime.timedelta(seconds=cput))
    if mem > 0:
        self.resources_used['mem'] = format_bytes(mem)
    if vmem > 0:
        self.resources_used['vmem'] = format_bytes(vmem)
    if threads > 0:
        self.resources_used['threads'] = threads
    self.job_id = os.path.splitext(os.path.basename(lockfile))[0]
    self.lockfile = lockfile
def test_linked_in_add_to_profile_btn_not_appearing_without_config(self):
    # Without LinkedIn config, don't show the "Add Certificate to LinkedIn" button
    self.client.login(username="******", password="******")

    CourseModeFactory.create(
        course_id=self.course.id,
        mode_slug='verified',
        mode_display_name='verified',
        expiration_datetime=datetime.now(pytz.UTC) - timedelta(days=1)
    )

    CourseEnrollment.enroll(self.user, self.course.id, mode='honor')

    self.course.start = datetime.now(pytz.UTC) - timedelta(days=2)
    self.course.end = datetime.now(pytz.UTC) - timedelta(days=1)
    self.course.display_name = u"Omega"
    self.course = self.update_course(self.course, self.user.id)

    download_url = 'www.edx.org'
    GeneratedCertificateFactory.create(
        user=self.user,
        course_id=self.course.id,
        status=CertificateStatuses.downloadable,
        mode='honor',
        grade='67',
        download_url=download_url
    )
    response = self.client.get(reverse('dashboard'))

    self.assertEquals(response.status_code, 200)
    self.assertNotIn('Add Certificate to LinkedIn', response.content)

    response_url = 'http://www.linkedin.com/profile/add?_ed='
    self.assertNotContains(response, escape(response_url))
def test_expiry_date_range(self):
    """
    Test that the verifications are filtered on the given range. Email is not
    sent for any verification with an expiry date out of range.
    """
    user = UserFactory.create()
    verification_in_range = self.create_and_submit(user)
    verification_in_range.status = 'approved'
    verification_in_range.expiry_date = now() - timedelta(days=1)
    verification_in_range.save()

    user = UserFactory.create()
    verification = self.create_and_submit(user)
    verification.status = 'approved'
    verification.expiry_date = now() - timedelta(days=5)
    verification.save()

    call_command('send_verification_expiry_email', '--days-range=2')

    # Check that only one email is sent
    self.assertEqual(len(mail.outbox), 1)

    # Verify that the email is not sent to the out of range verification
    expiry_email_date = SoftwareSecurePhotoVerification.objects.get(pk=verification.pk).expiry_email_date
    self.assertIsNone(expiry_email_date)
def get_datetime_at_period_ix(self, ix):
    """
    Get the datetime at a given period.

    :param ix: The index of the period.

    :returns: The datetime.
    """
    if self.timestep_period_duration == TimePeriod.millisecond:
        return self.start_datetime + timedelta(milliseconds=ix)
    elif self.timestep_period_duration == TimePeriod.second:
        return self.start_datetime + timedelta(seconds=ix)
    elif self.timestep_period_duration == TimePeriod.minute:
        return self.start_datetime + timedelta(minutes=ix)
    elif self.timestep_period_duration == TimePeriod.hour:
        return self.start_datetime + timedelta(hours=ix)
    elif self.timestep_period_duration == TimePeriod.day:
        return self.start_datetime + relativedelta(days=ix)
    elif self.timestep_period_duration == TimePeriod.week:
        return self.start_datetime + relativedelta(days=ix*7)
    elif self.timestep_period_duration == TimePeriod.month:
        return self.start_datetime + relativedelta(months=ix)
    elif self.timestep_period_duration == TimePeriod.year:
        return self.start_datetime + relativedelta(years=ix)
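# Usage sketch for get_datetime_at_period_ix above (added for illustration; the
# owning class and its constructor are assumptions inferred from the attributes
# referenced in the method, and TimePeriod is taken to be the enum used by the caller):
#
#     clock = Timeline(start_datetime=datetime(2020, 1, 1),
#                      timestep_period_duration=TimePeriod.month)
#     clock.get_datetime_at_period_ix(0)   # -> datetime(2020, 1, 1)
#     clock.get_datetime_at_period_ix(3)   # -> datetime(2020, 4, 1), via relativedelta(months=3)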
def login_and_forward(request, user, forward=None, message=None):
    if user.is_suspended():
        return forward_suspended_user(request, user)

    user.backend = "django.contrib.auth.backends.ModelBackend"
    login(request, user)

    if message is None:
        message = _("Welcome back %s, you are now logged in") % user.username

    request.user.message_set.create(message=message)

    if not forward:
        forward = request.session.get(ON_SIGNIN_SESSION_ATTR, reverse('index'))

    pending_data = request.session.get(PENDING_SUBMISSION_SESSION_ATTR, None)

    if pending_data and (user.email_isvalid or
                         pending_data['type'] not in settings.REQUIRE_EMAIL_VALIDATION_TO):
        submission_time = pending_data['time']
        if submission_time < datetime.datetime.now() - datetime.timedelta(
                minutes=int(settings.HOLD_PENDING_POSTS_MINUTES)):
            del request.session[PENDING_SUBMISSION_SESSION_ATTR]
        elif submission_time < datetime.datetime.now() - datetime.timedelta(
                minutes=int(settings.WARN_PENDING_POSTS_MINUTES)):
            user.message_set.create(message=(
                _("You have a %s pending submission.") % pending_data['data_name']
            ) + " %s, %s, %s" % (
                html.hyperlink(reverse('manage_pending_data', kwargs={'action': _('save')}),
                               _("save it")),
                html.hyperlink(reverse('manage_pending_data', kwargs={'action': _('review')}),
                               _("review")),
                html.hyperlink(reverse('manage_pending_data', kwargs={'action': _('cancel')}),
                               _("cancel"))
            ))
        else:
            return manage_pending_data(request, _('save'), forward)

    return HttpResponseRedirect(forward)
def test_dry_run_flag(self):
    """
    Test that the dry run flag sends no email and only logs the
    number of emails that would have been sent in each batch
    """
    user = UserFactory.create()
    verification = self.create_and_submit(user)
    verification.status = 'approved'
    verification.expiry_date = now() - timedelta(days=1)
    verification.save()

    start_date = now() - timedelta(days=1)  # using default days
    count = 1

    with LogCapture(LOGGER_NAME) as logger:
        call_command('send_verification_expiry_email', '--dry-run')
        logger.check(
            (LOGGER_NAME,
             'INFO',
             u"For the date range {} - {}, total Software Secure Photo verification filtered are {}"
             .format(start_date.date(), now().date(), count)
             ),
            (LOGGER_NAME,
             'INFO',
             u"This was a dry run, no email was sent. For the actual run email would have been sent "
             u"to {} learner(s)".format(count)
             ))
    self.assertEqual(len(mail.outbox), 0)
def generate_plan(wave, hours, minutes):
    goal_time = datetime.timedelta(
        hours=hours,
        minutes=minutes,
    )
    goal_pace_sec = goal_time.seconds / RACE_DIST
    start_time = wave_starts[wave]
    goal_finish = start_time + goal_time
    beer_time = goal_finish + datetime.timedelta(hours=1)

    for details in viewing_points.values():
        eta = start_time + datetime.timedelta(seconds=details["Mile"] * goal_pace_sec)
        details["ETA"] = eta.strftime("%I:%M %p")

    min_between_2_3 = (viewing_points[3]["Mile"] -
                       viewing_points[2]["Mile"]) * goal_pace_sec / 60
    viewing_points[2]["Notes"] = viewing_points[2]["Notes"].replace(
        "$BETWEEN", str(int(min_between_2_3))
    )

    friend_link = "http://nycmarathon.aws.af.cm/?" + urlencode(
        {"wave": wave, "hours": hours, "minutes": minutes}
    )

    #write_kml(viewing_points)

    return (start_time, goal_pace_sec, goal_finish, beer_time, viewing_points,
            friend_link)
def checkday(self, day):
    """ Check the day of unit
    Input: day - datetime/date, tested day
    Return: value of work unit or None
    """
    if type(day) == datetime:
        day = day.date()
    if day in self.days:
        lgood = self.days[day].gettimetuple()
        lbad = []
        if day.weekday() in self.pauses:
            thStart = datetime.combine(day, self.pauses[day.weekday()].time)
            thEnd = thStart + timedelta(seconds=(self.pauses[day.weekday()].duration*60))
            lbad.append((thStart, thEnd))
        if day in self.incidents:
            for inc in self.incidents[day]:
                dtStart = inc.datetime
                dtEnd = dtStart + timedelta(minutes=inc.duration)
                lbad.append((dtStart, dtEnd))
        lbad = segment.union(lbad)
        result = (lgood[1] - lgood[0]).total_seconds()/60
        for l in lbad:
            z = segment.getintersection(lgood, l)
            if z:
                result -= (z[1] - z[0]).total_seconds()/60
        return result
def InformacionGeneral():
    fecha = datetime.now().strftime('%H:%M:%S %d/%m/%Y ')
    resultado = "####---"+fecha+"---####\n"
    resultado += "--Sensores---------------------------------------\n"
    resultado += "| "+humedad1._nombre+" -> "+humedad1.get("datoImpreso")+" \n"
    resultado += "| "+humedad2._nombre+" -> "+humedad2.get("datoImpreso")+" \n"
    resultado += "| "+humedad3._nombre+" -> "+humedad3.get("datoImpreso")+" \n"
    # resultado += "| "+ph1._nombre+" -> "+str(fn.ConvertirPH(ph1))+"/14\n"
    # resultado += "| "+electro1._nombre+" -> "+str(fn.ConvertirEC(electro1))+" s/m\n"
    # resultado += "| "+solar._nombre+" -> "+str(solar.get("datoDigital"))+"/"+str(solar.get("totalDigital"))+"\n"
    # resultado += "| "+luzinterior._nombre+" -> "+str(luzinterior.get("datoDigital"))+"/"+str(luzinterior.get("totalDigital"))+"\n"
    resultado += "-------------------------------------------------\n\n"
    resultado += "--Controladores----------------------------------\n"
    resultado += "| "+bombaAgua._nombre+" -> "+str(bombaAgua._estado)+" \n"
    # resultado += "| "+iluminacion._nombre+" -> "+str(iluminacion._estado)+"\n"
    resultado += "-------------------------------------------------\n\n"
    if sistemaRiego._riegoActivo:
        resultado += "--Tiempo de Riego--------------------------------\n"
        resultado += "| Tiempo regado -> "+str(timedelta(seconds=sistemaRiego._tiempoRiego))+"\n"
        resultado += "| Tiempo restante -> "+str(timedelta(seconds=(op.TIEMPO_DE_RIEGO-sistemaRiego._tiempoRiego)))+" \n"
        resultado += "-------------------------------------------------\n\n"
    # if len(op.ERRORES):
    #     resultado += "--Errores: --------------------------------\n"
    #     for error in op.ERRORES:
    #         resultado += error
    return resultado
def expire_date(days=30):
    return date.today() + timedelta(days=days)
def run_name(params):
    """
    This is the function to actually run NAME
    :param params: input parameters
    :return: names of the output dir and zipped file
    """
    # replace any white space in title with underscores
    params['title'] = params['title'].replace(' ', '_')
    params['title'] = params['title'].replace(',', '')
    params['title'] = params['title'].replace('(', '')
    params['title'] = params['title'].replace(')', '')

    params['runid'] = "{}_{}".format(
        datetime.strftime(params['startdate'], "%Y-%m-%d"),
        datetime.strftime(params['enddate'], "%Y-%m-%d"))

    params['outputdir'] = os.path.join(params['outputdir'], params['runid'])
    if os.path.exists(params['outputdir']):
        shutil.rmtree(params['outputdir'])
    os.makedirs(params['outputdir'])
    os.makedirs(os.path.join(params['outputdir'], 'inputs'))
    os.makedirs(os.path.join(params['outputdir'], 'outputs'))

    # Will write a file that lists all the input parameters
    with open(os.path.join(params['outputdir'], 'user_input_parameters.txt'), 'w') as ins:
        for p in sorted(params):
            if p == 'outputdir':
                continue
            ins.write("%s: %s\n" % (p, params[p]))

    # Will loop through all the dates in range, including the final day
    for i, cur_date in enumerate(
            daterange(params['startdate'], params['enddate'] + timedelta(days=1))):
        os.makedirs(
            os.path.join(params['outputdir'], 'met_data', "input{}".format(i + 1)))
        with open(
                os.path.join(params['outputdir'], "inputs",
                             "input{}.txt".format(i + 1)), 'w') as fout:
            fout.write(generate_inputfile(params, cur_date, i + 1))

    with open(os.path.join(params['outputdir'], 'script.bsub'), 'w') as fout:
        fout.write(write_file(params, i + 1))

    print("Running NAME...")
    cat = subprocess.Popen(
        ['cat', os.path.join(params['outputdir'], 'script.bsub')],
        stdout=subprocess.PIPE)
    runbsub = subprocess.Popen('bsub',
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE,
                               stdin=cat.stdout)
    sout, serr = runbsub.communicate()
    jobid = sout.split(' ')[1].replace('>', '').replace('<', '')

    jobrunning = True
    while jobrunning:
        time.sleep(30)
        checkjob = subprocess.check_output('bjobs')
        if jobid in checkjob:
            processesrunning = 0
            for l in checkjob.split('\n'):
                if jobid in l:
                    processesrunning += 1
            percentcomplete = (((i + 1) - processesrunning) / float(i + 1)) * 100
            print("{0:.1f}%".format(percentcomplete))
        else:
            jobrunning = False

    print("\nDone")
    return params['runid']
def place_order(request):
    """
    This is a page for a form for making an order.
    """
    # CURRENTLY ONLY GET PRIMARY ADDRESS.
    if 'locale' not in request.session:
        request.session['locale'] = 'en_US'
    locale = request.session["locale"]

    user_addresses = request.user.shipping_addresses.filter(primary=True)

    if request.method == 'POST':
        form = OrderForm(request.POST, request.FILES, locale=locale)
        shipping_address_form = ShippingAddressForm(request.POST, locale=locale)
        if form.is_valid() and (shipping_address_form.is_valid() or len(user_addresses) > 0):
            # First create a shipping address if user has none
            if not user_addresses or shipping_address_form.is_valid():
                data_dict = {
                    'user': request.user,
                    'primary': True,
                    'address_type': ShippingAddress.AddressType.RECEIVER_ADDRESS,
                    **shipping_address_form.cleaned_data,
                }
                qset = ShippingAddress.objects.filter(user=request.user)
                shipping_address = ShippingAddress.objects.create(
                    **data_dict,
                )
                for user_address in user_addresses:
                    user_address.primary = False
                    user_address.save()
            else:
                shipping_address = ShippingAddress.objects.get(pk=request.POST['shipping_address'])

            if "item_image" in form.cleaned_data:
                item_image = form.cleaned_data["item_image"]
            else:
                item_image = None

            bid_end_datetime = datetime.datetime.utcnow().replace(tzinfo=pytz.utc) + \
                datetime.timedelta(hours=int(form.cleaned_data['num_hours']))

            data_dict = {
                'url': form.cleaned_data['url'],
                'item_image': form.cleaned_data['item_image'],
                'merchandise_type': form.cleaned_data['merchandise_type'],
                'quantity': int(form.cleaned_data['quantity']),
                'size': form.cleaned_data['size'],
                'color': form.cleaned_data['color'],
                'description': form.cleaned_data['description'],
                'receiver': request.user,
                'receiver_address': shipping_address,
                'bid_end_datetime': bid_end_datetime,
                'estimated_weight': 0,
            }
            order = create_order(**{'data': data_dict})
            action = OrderAction.objects.create(
                order=order,
                action=OrderAction.Action.ORDER_PLACED,
            )
            order.latest_action = action
            order.save()
            return redirect('friendship:order_details', order_id=order.id)
        else:
            pass
    else:
        if 'url' in request.GET:
            form = OrderForm(initial={'url': request.GET['url']}, locale=locale)
        else:
            form = OrderForm(initial={}, locale=locale)

    if not user_addresses:
        shipping_address_form = ShippingAddressForm(locale=locale)
    else:
        shipping_address_form = ShippingAddressForm(instance=user_addresses[0], locale=locale)

    return render(
        request,
        'friendship/place_order.html',
        {
            'form': form,
            'shipping_address_form': shipping_address_form,
            'user_addresses': user_addresses,
            'overlay': True,
        }
    )
sc = SparkContext(conf=conf)
sc.setLogLevel('WARN')
if len(sys.argv) > 5:
    if sys.argv[5] == "hive":
        sqlContext = HiveContext(sc)
else:
    sqlContext = SQLContext(sc)
hdfs = sys.argv[3]
dbname = sys.argv[4]

# dates needed for processing
etl_date = sys.argv[1]  # ETL date
V_DT = etl_date
# previous day's date
V_DT_LD = (date(int(etl_date[0:4]), int(etl_date[4:6]), int(etl_date[6:8])) + timedelta(-1)).strftime("%Y%m%d")
# first day of the current month
V_DT_FMD = date(int(etl_date[0:4]), int(etl_date[4:6]), 1).strftime("%Y%m%d")
# last day of the previous month
V_DT_LMD = (date(int(etl_date[0:4]), int(etl_date[4:6]), 1) + timedelta(-1)).strftime("%Y%m%d")
# 10-character date (YYYY-MM-DD)
V_DT10 = (date(int(etl_date[0:4]), int(etl_date[4:6]), int(etl_date[6:8]))).strftime("%Y-%m-%d")
V_STEP = 0

OCRM_A_RESOURCE_EBANK_TARGET = sqlContext.read.parquet(hdfs+'/OCRM_A_RESOURCE_EBANK_TARGET/*')
OCRM_A_RESOURCE_EBANK_TARGET.registerTempTable("OCRM_A_RESOURCE_EBANK_TARGET")

# Task [21] 001-01::
V_STEP = V_STEP + 1

sql = """
class ErrorReader(object):
    @staticmethod
    def read_error_files(file_names: Iterable[str]) -> List[ErrorEntry]:
        error_entries = []
        for file_name in sorted(file_names):
            print(f'Reading {file_name}')
            with open(file_name) as file:
                for error_line in file.readlines():
                    error_entry = ErrorReader.__parse_line(error_line)
                    if error_entry:
                        error_entries.append(error_entry)
        return error_entries

    @staticmethod
    def __parse_line(line: str) -> Optional[ErrorEntry]:
        """Converts a line from an Apache log file into a LogEntry."""
        match = re.match(ERROR_PATTERN, line)
        if not match:
            return None
        time_string, _, _, _, location, rest = match.groups()
        # [Fri Dec 16 01:46:23 2005]
        host_ip = ipaddress.ip_address(location)
        time = datetime.datetime.strptime(time_string, '%a %b %d %H:%M:%S.%f %Y')
        match = re.match(TEXT_ERROR_PATTERN, rest)
        if match:
            time_string2, severity, code_location, message = match.groups()
            time2 = datetime.datetime.strptime(time_string2, '%d/%b/%Y %H:%M:%S')
            assert time2 == time.replace(microsecond=0, tzinfo=None)
            return ErrorEntry(time=time, host_ip=host_ip, message=message,
                              code_location=code_location, severity=severity)
        else:
            return ErrorEntry(time=time, host_ip=host_ip, message=rest)

    def run(self) -> None:
        error_entries = self.get_error_entries()
        error_entries.sort(key=attrgetter('host_ip', 'time'))
        errors_by_host_ip = [(host_ip, list(error_entries))
                             for host_ip, error_entries in itertools.groupby(
                                 error_entries, attrgetter("host_ip"))]

        log_entries = self.get_log_entries()
        log_entries.sort(key=attrgetter('host_ip', 'time'))
        log_entries_by_host_ip = {
            host_ip: list(log_entries)
            for host_ip, log_entries in itertools.groupby(
                log_entries, attrgetter('host_ip'))
        }

        for host_ip, error_entries in errors_by_host_ip:
            log_entries = log_entries_by_host_ip.get(host_ip, [])
            self.check_one_ip(host_ip, error_entries, log_entries)

    def get_error_entries(self) -> List[ErrorEntry]:
        files = glob.glob("/users/fy/SETI/logs/tools.pds_error_log-201?-??-??")
        local_network = ipaddress.ip_network("10.1.0.0/24")

        def keep_error_entry(entry: ErrorEntry) -> bool:
            if entry.host_ip in local_network:
                return False
            if entry.severity and entry.message.startswith('Not Found: '):
                return False
            return True

        error_entries = ErrorReader.read_error_files(files)
        return list(filter(keep_error_entry, error_entries))

    def get_log_entries(self) -> List[LogEntry]:
        def keep_log_entry(entry: LogEntry) -> bool:
            if entry.status == 200 and entry.url.path.startswith('/static_media'):
                return False
            return True

        log_entries = LogReader.read_logs(
            glob.glob("/users/fy/SETI/logs/tools.pds_access_log-*"))
        return list(filter(keep_log_entry, log_entries))

    ERROR_LEEWAY = datetime.timedelta(milliseconds=200)

    def check_one_ip(self, host_ip: str, error_entries: List[ErrorEntry],
                     log_entries: List[LogEntry]) -> None:
        print(f'Host {host_ip}')
        error_entries_deque = deque(error_entries)
        log_entry_dates = [
            log_entry.time.replace(tzinfo=None) for log_entry in log_entries
        ]
        while error_entries_deque:
            # Any set of error logs on the same host, in which each log is separated
            # from the previous one by less than 200ms, is a single set of error logs
            these_error_entries = [error_entries_deque.popleft()]
            while (error_entries_deque and
                   error_entries_deque[0].time - these_error_entries[-1].time < self.ERROR_LEEWAY):
                these_error_entries.append(error_entries_deque.popleft())

            start_time = these_error_entries[0].time.replace(microsecond=0)
            end_time = these_error_entries[-1].time
            left = bisect_left(log_entry_dates, start_time)
            right = bisect_right(log_entry_dates, end_time)
            if left < right and left != len(log_entry_dates):
                for i in range(left, right):
                    print(f'{log_entries[i].time} {log_entries[i].url.geturl()}')
            else:
                print("No log entries found")
            for entry in these_error_entries:
                if entry.severity:
                    print(f'{entry.time}: {entry.severity} {entry.code_location} {entry.message}')
                else:
                    print(f'{entry.time}: {entry.message}')
            print()
"reporting_port": os.environ.get( "JAEGER_AGENT_PORT", jaeger_client.config.DEFAULT_REPORTING_PORT ), "reporting_host": os.environ.get("JAEGER_AGENT_HOST"), }, "logging": get_bool_from_env("JAEGER_LOGGING", False), }, service_name="saleor", validate=True, ).initialize_tracer() # Some cloud providers (Heroku) export REDIS_URL variable instead of CACHE_URL REDIS_URL = os.environ.get("REDIS_URL") if REDIS_URL: CACHE_URL = os.environ.setdefault("CACHE_URL", REDIS_URL) CACHES = {"default": django_cache_url.config()} # Default False because storefront and dashboard don't support expiration of token JWT_EXPIRE = get_bool_from_env("JWT_EXPIRE", False) JWT_TTL_ACCESS = timedelta(seconds=parse(os.environ.get("JWT_TTL_ACCESS", "5 minutes"))) JWT_TTL_APP_ACCESS = timedelta( seconds=parse(os.environ.get("JWT_TTL_APP_ACCESS", "5 minutes")) ) JWT_TTL_REFRESH = timedelta(seconds=parse(os.environ.get("JWT_TTL_REFRESH", "30 days"))) JWT_TTL_REQUEST_EMAIL_CHANGE = timedelta( seconds=parse(os.environ.get("JWT_TTL_REQUEST_EMAIL_CHANGE", "1 hour")), )
def save(self, *args, **kwargs):
    if not self.key:
        self.key = self.generate_key()
        self.expired = datetime.now() + timedelta(minutes=30)
    return super().save(*args, **kwargs)
import datetime
import calendar
import time

# year-month-day object
d = datetime.date(2016, 7, 24)
print(d)

today = datetime.date.today()
print(today)
print(today.day)
print(today.year)
print(today.weekday())     # monday=0 sunday=6
print(today.isoweekday())  # monday=1 sunday=7

deltime = datetime.timedelta(days=7)
print(today + deltime)  # future date
print(today - deltime)  # past date

birthday = datetime.date(2019, 10, 26)
time_until_birthday = birthday - today
print(time_until_birthday)
print(time_until_birthday.days)
print(time_until_birthday.total_seconds())

# hour-min-sec-mic object
t = datetime.time(9, 30, 45, 50000)
print(t)

# datetime object
df_2017[pd.datetime(2015, 1, 1)] = 0
df_2017[pd.datetime(2015, 7, 7)] = 0
df_2017[pd.datetime(2014, 1, 1)] = 0
df_2017[pd.datetime(2013, 1, 1)] = 0
# promo_2017[pd.datetime(2015, 7, 7)] = 0

##########################################################################

logger.info('Preparing training dataset...')
X_l, y_l = [], []

# Add train data on Aug 2014 and Aug 2015
logger.info('Preparing 2014 training dataset...')
t2014 = date(2014, 8, 6)
for i in range(4):
    delta = timedelta(days=7 * i)
    X_tmp, y_tmp = prepare_dataset(t2014 + delta)
    X_l.append(X_tmp)
    y_l.append(y_tmp)

logger.info('Preparing 2015 training dataset...')
t2015 = date(2015, 8, 5)
for i in range(4):
    delta = timedelta(days=7 * i)
    X_tmp, y_tmp = prepare_dataset(t2015 + delta)
    X_l.append(X_tmp)
    y_l.append(y_tmp)

logger.info('Preparing 2016 training dataset...')
t2016 = date(2016, 8, 3)
for i in range(4):
from datetime import timedelta

import requests
import requests_cache
from requests_oauthlib import OAuth1
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response

from api.commonresponses import DOWNSTREAM_ERROR_RESPONSE
from api.models import City
from api.modules.twitter.constants import TWITTER_CONSUMER_KEY, TWITTER_OAUTH_TOKEN_SECRET, TWITTER_OAUTH_TOKEN, \
    TWITTER_CONSUMER_SECRET, TWITTER_TRENDS_URL, TWITTER_SEARCH_URL
from api.modules.twitter.twitter_response import SearchTweetResponse

hour_difference = timedelta(hours=1)
requests_cache.install_cache(expire_after=hour_difference)


@api_view(['GET'])
def get_city_trends(request, city_id):
    """
    Returns a list of top trending tweets in the given city
    :param request:
    :param city_id:
    :return: 404 if invalid city id is sent
    :return: 503 if Twitter API request fails
    :return: 200 successful
    """
    try:
        city = City.objects.get(pk=city_id)
class Product(models.Model):
    ''' Products make up the conference inventory.

    Attributes:
        name (str): The display name for the product.

        description (str): Some descriptive text that will help the user to
            understand the product when they're at the registration form.

        category (Category): The Category that this product will be grouped
            under.

        price (Decimal): The price that 1 unit of this product will sell for.
            Note that this should be the full price, before any discounts are
            applied.

        limit_per_user (Optional[int]): This restricts the number of this
            Product that each attendee may claim. This extends across multiple
            Invoices.

        reservation_duration (datetime): When a Product is added to the user's
            tentative registration, it is marked as unavailable for a period of
            time. This allows the user to build up their registration and then
            pay for it. This reservation duration determines how long an item
            should be allowed to be reserved whilst being unpaid.

        order (int): An ascending order for displaying the Products within
            each Category.

    '''

    class Meta:
        app_label = "registrasion"
        verbose_name = _("inventory - product")
        ordering = ("category__order", "order")

    def __str__(self):
        return "%s - %s" % (self.category.name, self.name)

    name = models.CharField(
        max_length=255,
        verbose_name=_("Name"),
    )
    description = models.TextField(
        verbose_name=_("Description"),
        null=True,
        blank=True,
    )
    category = models.ForeignKey(
        Category,
        verbose_name=_("Product category"),
    )
    price = models.DecimalField(
        max_digits=8,
        decimal_places=2,
        verbose_name=_("Price"),
    )
    limit_per_user = models.PositiveIntegerField(
        null=True,
        blank=True,
        verbose_name=_("Limit per user"),
    )
    reservation_duration = models.DurationField(
        default=datetime.timedelta(hours=1),
        verbose_name=_("Reservation duration"),
        help_text=_("The length of time this product will be reserved before "
                    "it is released for someone else to purchase."),
    )
    order = models.PositiveIntegerField(
        verbose_name=_("Display order"),
        db_index=True,
    )
def get_timespan(df, dt, minus, periods, freq='D'):
    return df[pd.date_range(dt - timedelta(days=minus), periods=periods, freq=freq)]
from datetime import timedelta

import flatbuffers
import websockets
from rlbot.botmanager.agent_metadata import AgentMetadata
from rlbot.botmanager.bot_helper_process import BotHelperProcess
from rlbot.messages.flat import GameTickPacket, ControllerState, PlayerInput, TinyPacket, TinyPlayer, Vector3, Rotator, \
    TinyBall
from rlbot.utils.logging_utils import get_logger
from rlbot.utils.structures.game_interface import GameInterface
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.wait import WebDriverWait

MAX_AGENT_CALL_PERIOD = timedelta(seconds=1.0)


def index_to_player_string(idx):
    return str(idx + 1)


class ScratchManager(BotHelperProcess):

    def __init__(self, agent_metadata_queue, quit_event, options):
        super().__init__(agent_metadata_queue, quit_event, options)
        self.logger = get_logger('scratch_mgr')
        self.game_interface = GameInterface(self.logger)
        self.current_sockets = set()
        self.running_indices = set()
        self.port: int = options['port']
        self.sb3_file = options['sb3-file']
    'DEFAULT_PERMISSION_CLASSES': ('rest_framework.permissions.IsAuthenticated',
                                   'utils.permissions.AdapiPermissions'),
    'DEFAULT_AUTHENTICATION_CLASSES': (
        'rest_framework_jwt.authentication.JSONWebTokenAuthentication',
        'rest_framework.authentication.BasicAuthentication',
        'rest_framework.authentication.SessionAuthentication',
        # 'rest_framework.authentication.TokenAuthentication',
    ),
    'UNICODE_JSON': False,
}

import datetime

JWT_AUTH = {
    "JWT_EXPIRATION_DELTA": datetime.timedelta(days=0, hours=2, seconds=0),
    'JWT_AUTH_HEADER_PREFIX': 'JWT',
}

# MIDDLEWARE = [
#     'django.middleware.security.SecurityMiddleware',
#     'django.contrib.sessions.middleware.SessionMiddleware',
#     'django.middleware.common.CommonMiddleware',
#     'django.middleware.csrf.CsrfViewMiddleware',
#     'django.contrib.auth.middleware.AuthenticationMiddleware',
#     'django.contrib.messages.middleware.MessageMiddleware',
#     'django.middleware.clickjacking.XFrameOptionsMiddleware',
# ]

FILE_UPLOAD_HANDLERS = [
    'django.core.files.uploadhandler.MemoryFileUploadHandler',
def get_daily_dex_volume_for_given_pair(
    limit: int = 100,
    token: str = "UNI",
    vs: str = "USDT",
) -> pd.DataFrame:
    """Get daily volume for given pair [Source: https://graphql.bitquery.io/]

    Parameters
    -------
    limit: int
        Last n days to query data
    token: str
        ERC20 token symbol
    vs: str
        Quote currency.

    Returns
    -------
    pd.DataFrame
        Daily volume for given pair
    """

    dt = (datetime.date.today() - datetime.timedelta(min(limit, 365))).strftime(
        "%Y-%m-%d"
    )
    base, quote = find_token_address(token), find_token_address(vs)
    if not base or not quote:
        raise ValueError("Provided coin or quote currency doesn't exist\n")

    query = f"""
        {{
          ethereum(network: ethereum) {{
            dexTrades(
              options: {{desc: ["timeInterval.day", "trades"]}}
              baseCurrency: {{is: "{base}"}}
              quoteCurrency: {{is: "{quote}"}}
              date: {{since: "{dt}" }}
            ) {{
              timeInterval {{
                day(count: 1)
              }}
              baseCurrency {{
                symbol
              }}
              quoteCurrency {{
                symbol
              }}
              exchange {{
                fullName
              }}
              trades: count
              tradeAmount(in: USD)
              quotePrice
              maximum_price: quotePrice(calculate: maximum)
              minimum_price: quotePrice(calculate: minimum)
              open_price: minimum(of: block, get: quote_price)
              close_price: maximum(of: block, get: quote_price)
            }}
          }}
        }}
    """

    try:
        data = query_graph(BQ_URL, query)
    except BitQueryApiKeyException:
        logger.exception("Invalid API Key")
        console.print("[red]Invalid API Key[/red]\n")
        return pd.DataFrame()

    if not data:
        return pd.DataFrame()

    df = _extract_dex_trades(data)
    df.columns = [
        "trades",
        "tradeAmount",
        "price",
        "high",
        "low",
        "open",
        "close",
        "date",
        "base",
        "quote",
        "exchange",
    ]
    return df[
        [
            "date",
            "exchange",
            "base",
            "quote",
            "open",
            "high",
            "low",
            "close",
            "tradeAmount",
            "trades",
        ]
    ].sort_values(by="date", ascending=False)
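# Usage sketch for get_daily_dex_volume_for_given_pair above (added for
# illustration; it assumes the surrounding module is importable and that a valid
# BitQuery API key is configured, since query_graph raises
# BitQueryApiKeyException otherwise):
#
#     df = get_daily_dex_volume_for_given_pair(limit=30, token="UNI", vs="USDT")
#     print(df.head())  # newest day first: date, exchange, base, quote, OHLC, tradeAmount, trades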
# NYC Lat long
# indices longitude 96, latitude 49
# -----------
# columns = ['rel_hum', 'sp_hum', 'abs_hum', 'u_wind', 'v_wind', 'temp']

dbi_search = mongoDBI.mongoDBI(db_name=database_name)

feature = "rel_hum"
num_rec_per_day = 4
key_label = "date_idx"
key_contents = "2009-01-03_1"
value_label = "data"

start_date = date(2002, 1, 1)
end_date = date(2017, 3, 31)
time_increment = timedelta(days=1)

# Store data in data frame
df = pd.DataFrame(columns=columns)

current_date = start_date

# Loop through each date
while True:
    # Loop through 4 records
    for idx in range(1, num_rec_per_day + 1):
        key_contents = str(current_date) + '_' + str(idx)
        cur_res = {}
        skip = False
        # Loop though the columns
        for c in columns:
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True


# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/

STATIC_URL = '/static/'

AUTHENTICATION_BACKENDS = ['tembici.authentication.EmailBackend']

REST_FRAMEWORK = {
    'DEFAULT_AUTHENTICATION_CLASSES': (
        'rest_framework_simplejwt.authentication.JWTAuthentication',
    )
}

SIMPLE_JWT = {
    'SIGNING_KEY': os.getenv("SECRET_KEY"),
    'ACCESS_TOKEN_LIFETIME': timedelta(days=1),
    'REFRESH_TOKEN_LIFETIME': timedelta(days=3),
    'AUTH_HEADER_TYPES': ('Bearer', 'JWT'),
}
def get_spread_for_crypto_pair(
    token: str = "WETH", vs: str = "USDT", limit: int = 30
) -> pd.DataFrame:
    """Get an average bid and ask prices, average spread for given crypto pair for
    chosen time period. [Source: https://graphql.bitquery.io/]

    Parameters
    ----------
    limit: int
        Last n days to query data
    token: str
        ERC20 token symbol
    vs: str
        Quoted currency.

    Returns
    -------
    pd.DataFrame
        Average bid and ask prices, spread for given crypto pair for chosen time period
    """

    dt = (datetime.date.today() - datetime.timedelta(limit)).strftime("%Y-%m-%d")
    base, quote = find_token_address(token), find_token_address(vs)

    if not base or not quote:
        raise ValueError("Provided coin or quote currency doesn't exist\n")

    query = f"""
        {{
          ethereum(network: ethereum){{
            dexTrades(
              date: {{since:"{dt}"}}
              baseCurrency: {{is: "{base}"}},
              quoteCurrency: {{is: "{quote}"}}) {{
              date {{date}}
              baseCurrency {{symbol}}
              baseAmount
              quoteCurrency {{
                symbol
              }}
              quoteAmount
              trades: count
              quotePrice
              side
            }}
          }}
        }}
    """

    try:
        data = query_graph(BQ_URL, query)
    except BitQueryApiKeyException:
        logger.exception("Invalid API Key")
        console.print("[red]Invalid API Key[/red]\n")
        return pd.DataFrame()

    if not data:
        return pd.DataFrame()

    df = _extract_dex_trades(data)
    columns = ["quotePrice", "date.date", "baseCurrency.symbol", "quoteCurrency.symbol"]
    bids = df.query("side == 'SELL'")[columns]
    asks = df.query("side == 'BUY'")[columns]
    bids.columns = ["averageBidPrice", "date", "baseCurrency", "quoteCurrency"]
    asks.columns = ["averageAskPrice", "date", "baseCurrency", "quoteCurrency"]

    daily_spread = pd.merge(asks, bids, on=["date", "baseCurrency", "quoteCurrency"])
    daily_spread["dailySpread"] = abs(
        daily_spread["averageBidPrice"] - daily_spread["averageAskPrice"]
    )

    return daily_spread[
        [
            "date",
            "baseCurrency",
            "quoteCurrency",
            "dailySpread",
            "averageBidPrice",
            "averageAskPrice",
        ]
    ].sort_values(by="date", ascending=True)
def ASAP(requestContext, seriesList, resolution=1000):
    '''
    use the ASAP smoothing on a series
    https://arxiv.org/pdf/1703.00983.pdf
    https://raw.githubusercontent.com/stanford-futuredata/ASAP/master/ASAP.py

    :param requestContext:
    :param seriesList:
    :param resolution: either number of points to keep or a time resolution
    :return: smoothed(seriesList)
    '''
    if not seriesList:
        return []

    windowInterval = None
    if isinstance(resolution, six.string_types):
        delta = parseTimeOffset(resolution)
        windowInterval = abs(delta.seconds + (delta.days * 86400))

    if windowInterval:
        previewSeconds = windowInterval
    else:
        previewSeconds = max([s.step for s in seriesList]) * int(resolution)

    # ignore original data and pull new, including our preview
    # data from earlier is needed to calculate the early results
    newContext = requestContext.copy()
    newContext['startTime'] = (requestContext['startTime'] -
                               timedelta(seconds=previewSeconds))
    previewList = evaluateTokens(newContext, requestContext['args'][0])

    result = []
    for series in previewList:
        if windowInterval:
            # the resolution here is really the number of points to maintain
            # so we need to convert the "seconds" to num points
            windowPoints = round((series.end - series.start) / windowInterval)
        else:
            use_res = int(resolution)
            if len(series) < use_res:
                use_res = len(series)
            windowPoints = use_res

        if isinstance(resolution, six.string_types):
            newName = 'asap(%s,"%s")' % (series.name, resolution)
        else:
            newName = "asap(%s,%s)" % (series.name, resolution)

        step_guess = (series.end - series.start) // windowPoints
        newSeries = TimeSeries(
            newName,
            series.start,
            series.end,
            step_guess,
            []
        )
        newSeries.pathExpression = newName

        # detect "none" lists
        if len([v for v in series if v is not None]) <= 1:
            newSeries.extend(series)
        else:
            # the "resolution" is a suggestion,
            # the algo will alter it some in order
            # to get the best view for things
            new_s = smooth(series, windowPoints)

            # steps need to be ints, so we must force the issue
            new_step = round((series.end - series.start) / len(new_s))
            newSeries.step = new_step
            newSeries.extend(new_s)

        result.append(newSeries)

    return result
import pandas as pd
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Activation, Dropout, Flatten
from tensorflow.keras.layers import LSTM
from tensorflow.keras.callbacks import ModelCheckpoint
from tensorflow.keras.models import load_model
import time
from sklearn.metrics import mean_squared_error
import matplotlib.pyplot as plt
from sklearn.preprocessing import MinMaxScaler
from sklearn.preprocessing import StandardScaler
from datetime import date, datetime, timedelta
import psycopg2
import json
import ast

today = date.today() + timedelta(days=2)
#today = date.today() - timedelta(days=4)
print("Today", today)
ddttt = pd.Timestamp(today)
ddtttf = ddttt.strftime("%Y-%m-%d")

weekd = today.weekday()  # Week day of the next day
print("Weekday", weekd)

# Previous day, for getting the list of dates for fetching training data
previousday = ddttt + timedelta(days=-1)
previousdayf = previousday.strftime("%Y-%m-%d")
print("Previous Day", previousdayf)

# Start date for the data samples
startdate = pd.Timestamp(previousdayf)
dayfirst = startdate + timedelta(days=-366)
#print("Start Date", dayfirst.strftime("%Y-%m-%d"))
lat = nc.variables['lat'][:]
nnodes = len(lon)

# read connectivity array
nv = nc.variables['nv'][:, :].T  # transpose to get (ncells,3)
[ncells, three] = np.shape(nv)
nv = nv - 1  # python is 0-based

# now read data at nodes:
# read water depth at nodes
h = nc.variables['h'][:]

# find time index to read
hours_from_now = 0  # Examples: 0=>nowcast, 3 => forecast 3 hours from now, etc.
date = datetime.datetime.utcnow() + datetime.timedelta(0, 3600*hours_from_now)
#date = datetime.datetime(2011, 9, 9, 17, 00)  # specific time (UTC)
tindex = netCDF4.date2index(date, nc.variables['time'], select='nearest')

# read water level at nodes at a specific time
#z = nc.variables['zeta'][-1,:]      # -1 is the last time step of the forecast
#z = nc.variables['zeta'][tindex,:]  # index for date specified above

# read significant wave height at nodes at specified time step
#z = nc.variables['hs'][tindex,:]    # Note: 'hs' is only in GOM3 wave model

# write Shapefile using pyshp
# Test 1. Write polygons for each triangle, and create record
# values for each triangle that are the average of the 3 nodal values
# (since depth and water level are defined at nodes)
MC = mongoConnection(mongoAuth, db, collectionForBlocks)

# Find the last unixTime value in a working json file;
lU = PG.parseTransactionsFindLastValue()
if lU == 'FileWasEmpty!':
    lU = PG.parseTransactionsFindLastValue()
    print "Warning, file was empty, init zero params!"

# Find the same but in MongoDB;
lastBlockByUnixTime = MC.findLastBlockTime(collectionForBlocks, lU)

# Last block value in mongoDB;
findLastBlock = MC.findLastBlock(collectionForBlocks)

# Init global while vars;
nextDayTime = (datetime.fromtimestamp(float(lU)) + timedelta(hours=24)).strftime('%Y-%m-%d')  # Increase 1 day;
sumTxs = 0
nextDayTimeWhileProgress = nextDayTime
whileprogress = lastBlockByUnixTime

while whileprogress <= findLastBlock:
    lB = MC.findByBlock(collectionForBlocks, whileprogress)
    if lB != []:  # This should never happen!
        count = len(lB['tx'])
        unixTime = lB['time']
        reqNum = int(count)
        currBlkTime = (datetime.fromtimestamp(unixTime)).strftime('%Y-%m-%d')
        timeSet = strftime("%Y-%m-%d %H:%M:%S", gmtime())

        # This should never happen. But what if the blockchain stopped for more than 24h?
def was_published_recently(self):
    return self.pub_date >= timezone.now() - datetime.timedelta(days=1)
def __add__(self, other):
    if isinstance(other, relativedelta):
        return self.__class__(years=other.years + self.years,
                              months=other.months + self.months,
                              days=other.days + self.days,
                              hours=other.hours + self.hours,
                              minutes=other.minutes + self.minutes,
                              seconds=other.seconds + self.seconds,
                              microseconds=(other.microseconds +
                                            self.microseconds),
                              leapdays=other.leapdays or self.leapdays,
                              year=(other.year if other.year is not None
                                    else self.year),
                              month=(other.month if other.month is not None
                                     else self.month),
                              day=(other.day if other.day is not None
                                   else self.day),
                              weekday=(other.weekday if other.weekday is not None
                                       else self.weekday),
                              hour=(other.hour if other.hour is not None
                                    else self.hour),
                              minute=(other.minute if other.minute is not None
                                      else self.minute),
                              second=(other.second if other.second is not None
                                      else self.second),
                              microsecond=(other.microsecond if other.microsecond
                                           is not None else self.microsecond))
    if isinstance(other, datetime.timedelta):
        return self.__class__(years=self.years,
                              months=self.months,
                              days=self.days + other.days,
                              hours=self.hours,
                              minutes=self.minutes,
                              seconds=self.seconds + other.seconds,
                              microseconds=self.microseconds + other.microseconds,
                              leapdays=self.leapdays,
                              year=self.year,
                              month=self.month,
                              day=self.day,
                              weekday=self.weekday,
                              hour=self.hour,
                              minute=self.minute,
                              second=self.second,
                              microsecond=self.microsecond)
    if not isinstance(other, datetime.date):
        return NotImplemented
    elif self._has_time and not isinstance(other, datetime.datetime):
        other = datetime.datetime.fromordinal(other.toordinal())
    year = (self.year or other.year) + self.years
    month = self.month or other.month
    if self.months:
        assert 1 <= abs(self.months) <= 12
        month += self.months
        if month > 12:
            year += 1
            month -= 12
        elif month < 1:
            year -= 1
            month += 12
    day = min(calendar.monthrange(year, month)[1],
              self.day or other.day)
    repl = {"year": year, "month": month, "day": day}
    for attr in ["hour", "minute", "second", "microsecond"]:
        value = getattr(self, attr)
        if value is not None:
            repl[attr] = value
    days = self.days
    if self.leapdays and month > 2 and calendar.isleap(year):
        days += self.leapdays
    ret = (other.replace(**repl)
           + datetime.timedelta(days=days,
                                hours=self.hours,
                                minutes=self.minutes,
                                seconds=self.seconds,
                                microseconds=self.microseconds))
    if self.weekday:
        weekday, nth = self.weekday.weekday, self.weekday.n or 1
        jumpdays = (abs(nth) - 1) * 7
        if nth > 0:
            jumpdays += (7 - ret.weekday() + weekday) % 7
        else:
            jumpdays += (ret.weekday() - weekday) % 7
            jumpdays *= -1
        ret += datetime.timedelta(days=jumpdays)
    return ret
        uname = None if options.ids else user
        uid = long(user) if options.ids else None
        u = lookup_user(db, uid, uname)
        if verbose():
            sys.stderr.write("{} followers\n".format(u['screen_name_lower']))
        userids = set(get_followers(db, u['id']))
        if options.common:
            if commonusers:
                commonusers = commonusers & userids
            else:
                commonusers = userids
        else:
            creationdates += creation_distribution(db, userids)
else:
    users = [lookup_user(db, long(user) if options.ids else None,
                         None if options.ids else user)
             for user in userlist]
    userids = [u['id'] for u in users if u is not None and 'id' in u]
    #print userids
    creationdates = creation_distribution(db, userids)

if options.common:
    for uid in commonusers:
        print uid
else:
    print u'Date,Count'
    d = min(creationdates)
    end = max(creationdates)
    while d < end:
        cnt = creationdates[d]
        print u'{},'.format(d.date()),
        print u'{}'.format(cnt)
        d += timedelta(days=1)
def test_timedelta(self):
    x = datetime.timedelta(days=5, hours=2, minutes=10)
    self.assertEqual(convert(x), "INTERVAL '439800' SECOND")
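# Hedged sketch of a convert() helper consistent with the assertion above; the
# real implementation is not shown in the source. It renders a timedelta as an
# ANSI SQL interval literal using the total number of seconds.
import datetime

def convert(td):
    return "INTERVAL '{}' SECOND".format(int(td.total_seconds()))

assert convert(datetime.timedelta(days=5, hours=2, minutes=10)) == "INTERVAL '439800' SECOND"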
def sourceAlive(repoData: dict[Any, Any], lifespan: int) -> bool:
    """Is source repo alive?"""
    return getDatetime(repoData["pushedAt"]) > (
        datetime.datetime.now() - datetime.timedelta(weeks=lifespan)
    )
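# Usage sketch for sourceAlive(); getDatetime() is assumed to parse the
# "pushedAt" value into a naive datetime (the function compares it against
# datetime.datetime.now()). The repository payload below is hypothetical.
repoData = {"pushedAt": "2023-01-15T12:00:00Z"}
if sourceAlive(repoData, lifespan=52):
    print("pushed within the last 52 weeks")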
def contribution(row, status):
    """
    Convert row to dictionary, only selecting values we want.
    """
    contribution = Contribution(row)
    url = contribution.url
    if url == "":
        return

    if contribution.staff_pick.lower() == "yes":
        staff_picked = True
    else:
        staff_picked = False

    try:
        review_date = parse(contribution.review_date)
    except Exception:
        review_date = datetime(1970, 1, 1)

    if (datetime.now() - review_date).days > 7 and status != "unreviewed":
        return

    total_payout = 0

    # Check if post deleted
    try:
        comment = Comment(url)
    except Exception:
        return

    if contribution.review_status == "Pending":
        for reply in comment.get_replies():
            if reply.author == contribution.moderator:
                review_date = reply["created"]
                comment_url = reply.permlink
                break
        else:
            review_date = datetime(1970, 1, 1)
            comment_url = ""
    else:
        comment_url = ""

    # Calculate total (pending) payout of contribution
    if comment.time_elapsed() > timedelta(days=7):
        total_payout = Amount(comment.json()["total_payout_value"]).amount
    else:
        total_payout = Amount(comment.json()["pending_payout_value"]).amount

    # Get votes, comments and author
    votes = comment.json()["net_votes"]
    comments = comment.json()["children"]
    author = comment.author

    # Add status for unvoted and pending
    if contribution.vote_status == "Unvoted":
        status = "unvoted"
    elif contribution.vote_status == "Pending":
        status = "pending"

    # Check if contribution was voted on
    if (contribution.vote_status == "Yes"
            or contribution.category == "iamutopian"):
        voted_on = True
        try:
            utopian_vote = Vote(f"{comment.authorperm}|utopian-io").sbd
        except Exception:
            voted_on = False
            utopian_vote = 0
    else:
        voted_on = False
        utopian_vote = 0

    # Check for when contribution not reviewed
    if contribution.score == "":
        score = None
    else:
        try:
            score = float(contribution.score)
        except Exception:
            score = None

    # Create contribution dictionary and return it
    new_contribution = {
        "moderator": contribution.moderator.strip(),
        "author": author,
        "review_date": review_date,
        "url": url,
        "repository": contribution.repository,
        "category": contribution.category,
        "staff_picked": staff_picked,
        "picked_by": contribution.picked_by,
        "status": status,
        "score": score,
        "voted_on": voted_on,
        "total_payout": total_payout,
        "total_votes": votes,
        "total_comments": comments,
        "utopian_vote": utopian_vote,
        "created": comment["created"],
        "title": comment.title,
        "review_status": contribution.review_status.lower(),
        "comment_url": comment_url
    }
    return new_contribution
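# Standalone sketch of the payout rule used above, without the beem Comment
# object: a post older than seven days has a final payout ("total_payout_value"),
# otherwise the payout is still pending ("pending_payout_value"). The post_json
# values and the "12.345 SBD" amount format are hypothetical example data.
from datetime import datetime, timedelta

def payout_amount(post_json, created):
    if datetime.utcnow() - created > timedelta(days=7):
        return float(post_json["total_payout_value"].split()[0])
    return float(post_json["pending_payout_value"].split()[0])

payout_amount({"total_payout_value": "12.345 SBD",
               "pending_payout_value": "0.000 SBD"},
              created=datetime(2019, 1, 1))
# -> 12.345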
def last_completed_audit_period(unit=None):
    """This method gives you the most recently *completed* audit period.

    arguments:
            unit: string, one of 'hour', 'day', 'month', 'year'
                Periods normally begin at the beginning (UTC) of the
                period unit (So a 'day' period begins at midnight UTC,
                a 'month' unit on the 1st, a 'year' on Jan, 1)
                unit string may be appended with an optional offset
                like so:  'day@18'  This will begin the period at 18:00
                UTC.  'month@15' starts a monthly period on the 15th,
                and year@3 begins a yearly one on March 1st.

    returns:  2 tuple of datetimes (begin, end)
              The begin timestamp of this audit period is the same as the
              end of the previous.
    """
    if not unit:
        unit = CONF.volume_usage_audit_period

    offset = 0
    if '@' in unit:
        unit, offset = unit.split("@", 1)
        offset = int(offset)

    rightnow = timeutils.utcnow()
    if unit not in ('month', 'day', 'year', 'hour'):
        raise ValueError('Time period must be hour, day, month or year')
    if unit == 'month':
        if offset == 0:
            offset = 1
        end = datetime.datetime(day=offset,
                                month=rightnow.month,
                                year=rightnow.year)
        if end >= rightnow:
            year = rightnow.year
            if 1 >= rightnow.month:
                year -= 1
                month = 12 + (rightnow.month - 1)
            else:
                month = rightnow.month - 1
            end = datetime.datetime(day=offset,
                                    month=month,
                                    year=year)
        year = end.year
        if 1 >= end.month:
            year -= 1
            month = 12 + (end.month - 1)
        else:
            month = end.month - 1
        begin = datetime.datetime(day=offset, month=month, year=year)

    elif unit == 'year':
        if offset == 0:
            offset = 1
        end = datetime.datetime(day=1, month=offset, year=rightnow.year)
        if end >= rightnow:
            end = datetime.datetime(day=1,
                                    month=offset,
                                    year=rightnow.year - 1)
            begin = datetime.datetime(day=1,
                                      month=offset,
                                      year=rightnow.year - 2)
        else:
            begin = datetime.datetime(day=1,
                                      month=offset,
                                      year=rightnow.year - 1)

    elif unit == 'day':
        end = datetime.datetime(hour=offset,
                                day=rightnow.day,
                                month=rightnow.month,
                                year=rightnow.year)
        if end >= rightnow:
            end = end - datetime.timedelta(days=1)
        begin = end - datetime.timedelta(days=1)

    elif unit == 'hour':
        end = rightnow.replace(minute=offset, second=0, microsecond=0)
        if end >= rightnow:
            end = end - datetime.timedelta(hours=1)
        begin = end - datetime.timedelta(hours=1)

    return (begin, end)
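# Worked example of the 'day@18' branch above with a fixed "rightnow"; this is
# a standalone trace of the same arithmetic, not a call into the original
# function (which depends on CONF and timeutils).
import datetime

rightnow = datetime.datetime(2019, 5, 20, 10, 0)   # pretend timeutils.utcnow()
offset = 18                                        # from 'day@18'
end = datetime.datetime(hour=offset, day=rightnow.day,
                        month=rightnow.month, year=rightnow.year)
if end >= rightnow:                                # 18:00 today is not over yet
    end = end - datetime.timedelta(days=1)
begin = end - datetime.timedelta(days=1)
# (begin, end) == (2019-05-18 18:00, 2019-05-19 18:00): the most recently
# completed 18:00-to-18:00 day.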
class TestTimestampTZOperations(object):
    # --------------------------------------------------------------
    # Timestamp.tz_localize

    def test_tz_localize_pushes_out_of_bounds(self):
        # GH#12677
        # tz_localize that pushes away from the boundary is OK
        pac = Timestamp.min.tz_localize('US/Pacific')
        assert pac.value > Timestamp.min.value
        pac.tz_convert('Asia/Tokyo')  # tz_convert doesn't change value
        with pytest.raises(OutOfBoundsDatetime):
            Timestamp.min.tz_localize('Asia/Tokyo')

        # tz_localize that pushes away from the boundary is OK
        tokyo = Timestamp.max.tz_localize('Asia/Tokyo')
        assert tokyo.value < Timestamp.max.value
        tokyo.tz_convert('US/Pacific')  # tz_convert doesn't change value
        with pytest.raises(OutOfBoundsDatetime):
            Timestamp.max.tz_localize('US/Pacific')

    def test_tz_localize_ambiguous_bool(self):
        # make sure that we are correctly accepting bool values as ambiguous
        # GH#14402
        ts = Timestamp('2015-11-01 01:00:03')
        expected0 = Timestamp('2015-11-01 01:00:03-0500', tz='US/Central')
        expected1 = Timestamp('2015-11-01 01:00:03-0600', tz='US/Central')

        with pytest.raises(pytz.AmbiguousTimeError):
            ts.tz_localize('US/Central')

        result = ts.tz_localize('US/Central', ambiguous=True)
        assert result == expected0

        result = ts.tz_localize('US/Central', ambiguous=False)
        assert result == expected1

    def test_tz_localize_ambiguous(self):
        ts = Timestamp('2014-11-02 01:00')
        ts_dst = ts.tz_localize('US/Eastern', ambiguous=True)
        ts_no_dst = ts.tz_localize('US/Eastern', ambiguous=False)

        assert (ts_no_dst.value - ts_dst.value) / 1e9 == 3600
        with pytest.raises(ValueError):
            ts.tz_localize('US/Eastern', ambiguous='infer')

        # GH#8025
        msg = ('Cannot localize tz-aware Timestamp, '
               'use tz_convert for conversions')
        with pytest.raises(TypeError, match=msg):
            Timestamp('2011-01-01', tz='US/Eastern').tz_localize('Asia/Tokyo')

        msg = ('Cannot convert tz-naive Timestamp, '
               'use tz_localize to localize')
        with pytest.raises(TypeError, match=msg):
            Timestamp('2011-01-01').tz_convert('Asia/Tokyo')

    @pytest.mark.parametrize('stamp, tz', [
        ('2015-03-08 02:00', 'US/Eastern'),
        ('2015-03-08 02:30', 'US/Pacific'),
        ('2015-03-29 02:00', 'Europe/Paris'),
        ('2015-03-29 02:30', 'Europe/Belgrade')])
    @pytest.mark.filterwarnings('ignore::FutureWarning')
    def test_tz_localize_nonexistent(self, stamp, tz):
        # GH#13057
        ts = Timestamp(stamp)
        with pytest.raises(NonExistentTimeError):
            ts.tz_localize(tz)
        # GH 22644
        with pytest.raises(NonExistentTimeError):
            with tm.assert_produces_warning(FutureWarning):
                ts.tz_localize(tz, errors='raise')
        with tm.assert_produces_warning(FutureWarning):
            assert ts.tz_localize(tz, errors='coerce') is NaT

    def test_tz_localize_errors_ambiguous(self):
        # GH#13057
        ts = Timestamp('2015-11-1 01:00')
        with pytest.raises(AmbiguousTimeError):
            with tm.assert_produces_warning(FutureWarning):
                ts.tz_localize('US/Pacific', errors='coerce')

    @pytest.mark.filterwarnings('ignore::FutureWarning')
    def test_tz_localize_errors_invalid_arg(self):
        # GH 22644
        tz = 'Europe/Warsaw'
        ts = Timestamp('2015-03-29 02:00:00')
        with pytest.raises(ValueError):
            with tm.assert_produces_warning(FutureWarning):
                ts.tz_localize(tz, errors='foo')

    def test_tz_localize_errors_coerce(self):
        # GH 22644
        # make sure errors='coerce' gets mapped correctly to nonexistent
        tz = 'Europe/Warsaw'
        ts = Timestamp('2015-03-29 02:00:00')
        with tm.assert_produces_warning(FutureWarning):
            result = ts.tz_localize(tz, errors='coerce')
        expected = ts.tz_localize(tz, nonexistent='NaT')
        assert result is expected

    @pytest.mark.parametrize('stamp', ['2014-02-01 09:00', '2014-07-08 09:00',
                                       '2014-11-01 17:00', '2014-11-05 00:00'])
    def test_tz_localize_roundtrip(self, stamp, tz_aware_fixture):
        tz = tz_aware_fixture
        ts = Timestamp(stamp)
        localized = ts.tz_localize(tz)
        assert localized == Timestamp(stamp, tz=tz)

        with pytest.raises(TypeError):
            localized.tz_localize(tz)

        reset = localized.tz_localize(None)
        assert reset == ts
        assert reset.tzinfo is None

    def test_tz_localize_ambiguous_compat(self):
        # validate that pytz and dateutil are compat for dst
        # when the transition happens
        naive = Timestamp('2013-10-27 01:00:00')

        pytz_zone = 'Europe/London'
        dateutil_zone = 'dateutil/Europe/London'
        result_pytz = naive.tz_localize(pytz_zone, ambiguous=0)
        result_dateutil = naive.tz_localize(dateutil_zone, ambiguous=0)
        assert result_pytz.value == result_dateutil.value
        assert result_pytz.value == 1382835600000000000

        if LooseVersion(dateutil.__version__) < LooseVersion('2.6.0'):
            # dateutil 2.6 buggy w.r.t. ambiguous=0
            # see gh-14621
            # see https://github.com/dateutil/dateutil/issues/321
            assert (result_pytz.to_pydatetime().tzname() ==
                    result_dateutil.to_pydatetime().tzname())
            assert str(result_pytz) == str(result_dateutil)
        elif LooseVersion(dateutil.__version__) > LooseVersion('2.6.0'):
            # fixed ambiguous behavior
            assert result_pytz.to_pydatetime().tzname() == 'GMT'
            assert result_dateutil.to_pydatetime().tzname() == 'BST'
            assert str(result_pytz) != str(result_dateutil)

        # 1 hour difference
        result_pytz = naive.tz_localize(pytz_zone, ambiguous=1)
        result_dateutil = naive.tz_localize(dateutil_zone, ambiguous=1)
        assert result_pytz.value == result_dateutil.value
        assert result_pytz.value == 1382832000000000000

        # dateutil < 2.6 is buggy w.r.t. ambiguous timezones
        if LooseVersion(dateutil.__version__) > LooseVersion('2.5.3'):
            # see gh-14621
            assert str(result_pytz) == str(result_dateutil)
            assert (result_pytz.to_pydatetime().tzname() ==
                    result_dateutil.to_pydatetime().tzname())

    @pytest.mark.parametrize('tz', [pytz.timezone('US/Eastern'),
                                    gettz('US/Eastern'),
                                    'US/Eastern', 'dateutil/US/Eastern'])
    def test_timestamp_tz_localize(self, tz):
        stamp = Timestamp('3/11/2012 04:00')

        result = stamp.tz_localize(tz)
        expected = Timestamp('3/11/2012 04:00', tz=tz)
        assert result.hour == expected.hour
        assert result == expected

    @pytest.mark.parametrize('start_ts, tz, end_ts, shift', [
        ['2015-03-29 02:20:00', 'Europe/Warsaw', '2015-03-29 03:00:00',
         'forward'],
        ['2015-03-29 02:20:00', 'Europe/Warsaw',
         '2015-03-29 01:59:59.999999999', 'backward'],
        ['2015-03-29 02:20:00', 'Europe/Warsaw', '2015-03-29 03:20:00',
         timedelta(hours=1)],
        ['2015-03-29 02:20:00', 'Europe/Warsaw', '2015-03-29 01:20:00',
         timedelta(hours=-1)],
        ['2018-03-11 02:33:00', 'US/Pacific', '2018-03-11 03:00:00',
         'forward'],
        ['2018-03-11 02:33:00', 'US/Pacific',
         '2018-03-11 01:59:59.999999999', 'backward'],
        ['2018-03-11 02:33:00', 'US/Pacific', '2018-03-11 03:33:00',
         timedelta(hours=1)],
        ['2018-03-11 02:33:00', 'US/Pacific', '2018-03-11 01:33:00',
         timedelta(hours=-1)]
    ])
    @pytest.mark.parametrize('tz_type', ['', 'dateutil/'])
    def test_timestamp_tz_localize_nonexistent_shift(self, start_ts, tz,
                                                     end_ts, shift, tz_type):
        # GH 8917, 24466
        tz = tz_type + tz
        if isinstance(shift, str):
            shift = 'shift_' + shift
        ts = Timestamp(start_ts)
        result = ts.tz_localize(tz, nonexistent=shift)
        expected = Timestamp(end_ts).tz_localize(tz)
        assert result == expected

    @pytest.mark.parametrize('offset', [-1, 1])
    @pytest.mark.parametrize('tz_type', ['', 'dateutil/'])
    def test_timestamp_tz_localize_nonexistent_shift_invalid(self, offset,
                                                             tz_type):
        # GH 8917, 24466
        tz = tz_type + 'Europe/Warsaw'
        ts = Timestamp('2015-03-29 02:20:00')
        msg = "The provided timedelta will relocalize on a nonexistent time"
        with pytest.raises(ValueError, match=msg):
            ts.tz_localize(tz, nonexistent=timedelta(seconds=offset))

    @pytest.mark.parametrize('tz', ['Europe/Warsaw', 'dateutil/Europe/Warsaw'])
    def test_timestamp_tz_localize_nonexistent_NaT(self, tz):
        # GH 8917
        ts = Timestamp('2015-03-29 02:20:00')
        result = ts.tz_localize(tz, nonexistent='NaT')
        assert result is NaT

    @pytest.mark.parametrize('tz', ['Europe/Warsaw', 'dateutil/Europe/Warsaw'])
    def test_timestamp_tz_localize_nonexistent_raise(self, tz):
        # GH 8917
        ts = Timestamp('2015-03-29 02:20:00')
        with pytest.raises(pytz.NonExistentTimeError):
            ts.tz_localize(tz, nonexistent='raise')
        with pytest.raises(ValueError):
            ts.tz_localize(tz, nonexistent='foo')

    # ------------------------------------------------------------------
    # Timestamp.tz_convert

    @pytest.mark.parametrize('stamp', ['2014-02-01 09:00', '2014-07-08 09:00',
                                       '2014-11-01 17:00', '2014-11-05 00:00'])
    def test_tz_convert_roundtrip(self, stamp, tz_aware_fixture):
        tz = tz_aware_fixture
        ts = Timestamp(stamp, tz='UTC')
        converted = ts.tz_convert(tz)

        reset = converted.tz_convert(None)
        assert reset == Timestamp(stamp)
        assert reset.tzinfo is None
        assert reset == converted.tz_convert('UTC').tz_localize(None)

    @pytest.mark.parametrize('tzstr', ['US/Eastern', 'dateutil/US/Eastern'])
    def test_astimezone(self, tzstr):
        # astimezone is an alias for tz_convert, so keep it with
        # the tz_convert tests
        utcdate = Timestamp('3/11/2012 22:00', tz='UTC')
        expected = utcdate.tz_convert(tzstr)
        result = utcdate.astimezone(tzstr)
        assert expected == result
        assert isinstance(result, Timestamp)

    @td.skip_if_windows
    def test_tz_convert_utc_with_system_utc(self):
        from pandas._libs.tslibs.timezones import maybe_get_tz

        # from system utc to real utc
        ts = Timestamp('2001-01-05 11:56', tz=maybe_get_tz('dateutil/UTC'))
        # check that the time hasn't changed.
        assert ts == ts.tz_convert(dateutil.tz.tzutc())

        # from system utc to real utc
        ts = Timestamp('2001-01-05 11:56', tz=maybe_get_tz('dateutil/UTC'))
        # check that the time hasn't changed.
        assert ts == ts.tz_convert(dateutil.tz.tzutc())

    # ------------------------------------------------------------------
    # Timestamp.__init__ with tz str or tzinfo

    def test_timestamp_constructor_tz_utc(self):
        utc_stamp = Timestamp('3/11/2012 05:00', tz='utc')
        assert utc_stamp.tzinfo is pytz.utc
        assert utc_stamp.hour == 5

        utc_stamp = Timestamp('3/11/2012 05:00').tz_localize('utc')
        assert utc_stamp.hour == 5

    def test_timestamp_to_datetime_tzoffset(self):
        tzinfo = tzoffset(None, 7200)
        expected = Timestamp('3/11/2012 04:00', tz=tzinfo)
        result = Timestamp(expected.to_pydatetime())
        assert expected == result

    def test_timestamp_constructor_near_dst_boundary(self):
        # GH#11481 & GH#15777
        # Naive string timestamps were being localized incorrectly
        # with tz_convert_single instead of tz_localize_to_utc

        for tz in ['Europe/Brussels', 'Europe/Prague']:
            result = Timestamp('2015-10-25 01:00', tz=tz)
            expected = Timestamp('2015-10-25 01:00').tz_localize(tz)
            assert result == expected

            with pytest.raises(pytz.AmbiguousTimeError):
                Timestamp('2015-10-25 02:00', tz=tz)

        result = Timestamp('2017-03-26 01:00', tz='Europe/Paris')
        expected = Timestamp('2017-03-26 01:00').tz_localize('Europe/Paris')
        assert result == expected

        with pytest.raises(pytz.NonExistentTimeError):
            Timestamp('2017-03-26 02:00', tz='Europe/Paris')

        # GH#11708
        naive = Timestamp('2015-11-18 10:00:00')
        result = naive.tz_localize('UTC').tz_convert('Asia/Kolkata')
        expected = Timestamp('2015-11-18 15:30:00+0530', tz='Asia/Kolkata')
        assert result == expected

        # GH#15823
        result = Timestamp('2017-03-26 00:00', tz='Europe/Paris')
        expected = Timestamp('2017-03-26 00:00:00+0100', tz='Europe/Paris')
        assert result == expected

        result = Timestamp('2017-03-26 01:00', tz='Europe/Paris')
        expected = Timestamp('2017-03-26 01:00:00+0100', tz='Europe/Paris')
        assert result == expected

        with pytest.raises(pytz.NonExistentTimeError):
            Timestamp('2017-03-26 02:00', tz='Europe/Paris')

        result = Timestamp('2017-03-26 02:00:00+0100', tz='Europe/Paris')
        naive = Timestamp(result.value)
        expected = naive.tz_localize('UTC').tz_convert('Europe/Paris')
        assert result == expected

        result = Timestamp('2017-03-26 03:00', tz='Europe/Paris')
        expected = Timestamp('2017-03-26 03:00:00+0200', tz='Europe/Paris')
        assert result == expected

    @pytest.mark.parametrize('tz', [pytz.timezone('US/Eastern'),
                                    gettz('US/Eastern'),
                                    'US/Eastern', 'dateutil/US/Eastern'])
    def test_timestamp_constructed_by_date_and_tz(self, tz):
        # GH#2993, Timestamp cannot be constructed by datetime.date
        # and tz correctly
        result = Timestamp(date(2012, 3, 11), tz=tz)

        expected = Timestamp('3/11/2012', tz=tz)
        assert result.hour == expected.hour
        assert result == expected

    @pytest.mark.parametrize('tz', [pytz.timezone('US/Eastern'),
                                    gettz('US/Eastern'),
                                    'US/Eastern', 'dateutil/US/Eastern'])
    def test_timestamp_add_timedelta_push_over_dst_boundary(self, tz):
        # GH#1389

        # 4 hours before DST transition
        stamp = Timestamp('3/10/2012 22:00', tz=tz)

        result = stamp + timedelta(hours=6)

        # spring forward, + "7" hours
        expected = Timestamp('3/11/2012 05:00', tz=tz)

        assert result == expected

    def test_timestamp_timetz_equivalent_with_datetime_tz(self,
                                                          tz_naive_fixture):
        # GH21358
        tz = timezones.maybe_get_tz(tz_naive_fixture)

        stamp = Timestamp('2018-06-04 10:20:30', tz=tz)
        _datetime = datetime(2018, 6, 4, hour=10, minute=20, second=30,
                             tzinfo=tz)

        result = stamp.timetz()
        expected = _datetime.timetz()

        assert result == expected
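# Usage sketch of the tz_localize behaviour exercised above; the inputs and
# expected results are taken from the test parametrizations (the `ambiguous`
# and `nonexistent` keywords require a reasonably recent pandas).
from datetime import timedelta
from pandas import Timestamp

ts = Timestamp('2018-03-11 02:33:00')  # nonexistent wall time in US/Pacific
ts.tz_localize('US/Pacific', nonexistent='shift_forward')     # 2018-03-11 03:00:00-07:00
ts.tz_localize('US/Pacific', nonexistent=timedelta(hours=1))  # 2018-03-11 03:33:00-07:00
ts.tz_localize('US/Pacific', nonexistent='NaT')               # NaT

# Ambiguous wall time on a fall-back day is disambiguated with `ambiguous`:
Timestamp('2015-11-01 01:00:03').tz_localize('US/Central', ambiguous=True)   # ...-05:00 (DST)
Timestamp('2015-11-01 01:00:03').tz_localize('US/Central', ambiguous=False)  # ...-06:00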